diff --git a/integration/flags/.snapshots/TestInitCommand-init b/integration/flags/.snapshots/TestInitCommand-init index e670160a7..828811ac0 100644 --- a/integration/flags/.snapshots/TestInitCommand-init +++ b/integration/flags/.snapshots/TestInitCommand-init @@ -13,14 +13,14 @@ scan: type: verifier languages: - ruby + param_parenting: true + processors: [] patterns: - pattern: | class $CLASS_NAME < ApplicationRecord encrypts <$ARGUMENT> end filters: [] - param_parenting: true - processors: [] root_singularize: true root_lowercase: true metavars: {} @@ -49,6 +49,8 @@ scan: type: risk languages: - ruby + param_parenting: false + processors: [] patterns: - pattern: | Rails.application.configure do @@ -64,8 +66,6 @@ scan: } end filters: [] - param_parenting: false - processors: [] root_singularize: false root_lowercase: false metavars: {} @@ -76,12 +76,12 @@ scan: type: risk languages: - ruby + param_parenting: false + processors: [] patterns: - pattern: | JWT.encode(<$ARGUMENT>) filters: [] - param_parenting: false - processors: [] root_singularize: false root_lowercase: false metavars: {} @@ -92,12 +92,12 @@ scan: type: risk languages: - ruby + param_parenting: false + processors: [] patterns: - pattern: | session[...] = $ANYTHING filters: [] - param_parenting: false - processors: [] root_singularize: false root_lowercase: false metavars: {} @@ -108,6 +108,8 @@ scan: type: risk languages: - ruby + param_parenting: false + processors: [] patterns: - pattern: | logger.info(<$ARGUMENT>) @@ -115,8 +117,6 @@ scan: - pattern: | Rails.logger.info(<$ARGUMENT>) filters: [] - param_parenting: false - processors: [] root_singularize: false root_lowercase: false metavars: {} @@ -127,12 +127,6 @@ scan: type: data_type languages: - sql - patterns: - - pattern: | - CREATE TABLE public.$TABLE_NAME ( - <$COLUMN> - ) - filters: [] param_parenting: true processors: - query: | @@ -175,6 +169,12 @@ scan: "line_number": detection.source.line_number } } + patterns: + - pattern: | + CREATE TABLE public.$TABLE_NAME ( + <$COLUMN> + ) + filters: [] root_singularize: true root_lowercase: true metavars: {} @@ -185,6 +185,8 @@ scan: type: risk languages: - ruby + param_parenting: true + processors: [] patterns: - pattern: | CSV.open { <$DATA_TYPE> } @@ -202,8 +204,6 @@ scan: - pattern: | File.open { <$DATA_TYPE> } filters: [] - param_parenting: true - processors: [] root_singularize: false root_lowercase: false metavars: {} @@ -214,6 +214,8 @@ scan: type: risk languages: - ruby + param_parenting: false + processors: [] patterns: - pattern: | URI.encode_www_form(<$DATA_TYPE>) @@ -234,8 +236,6 @@ scan: - HTTPX - RestClient - Typhoeus - param_parenting: false - processors: [] root_singularize: false root_lowercase: false metavars: {} @@ -246,6 +246,8 @@ scan: type: risk languages: - ruby + param_parenting: false + processors: [] patterns: - pattern: | URI.encode_www_form(<$DATA_TYPE>) @@ -266,8 +268,6 @@ scan: - HTTPX - RestClient - Typhoeus - param_parenting: false - processors: [] root_singularize: false root_lowercase: false metavars: {} @@ -293,6 +293,267 @@ scan: disable-domain-resolution: true domain-resolution-timeout: 3s internal-domains: [] + policies: + application_level_encryption_missing: + query: | + critical = data.bearer.application_level_encryption.critical + high = data.bearer.application_level_encryption.high + id: detect_sql_create_public_table + name: Application level encryption missing + description: Application level encryption missing + level: "" + modules: + - path: 
policies/application_level_encryption.rego + name: bearer.application_level_encryption + content: "package bearer.application_level_encryption\n\nimport future.keywords\n\nsensitive_data_group_uuid := \"f6a0c071-5908-4420-bac2-bba28d41223e\"\npersonal_data_group_uuid := \"e1d3135b-3c0f-4b55-abce-19f27a26cbb3\"\n\nhigh[item] {\n some datatype in input.dataflow.data_types \n some detector in datatype.detectors\n detector.name == input.policy_id\n \n some location in detector.locations\n not location.encrypted\n\n some category in input.data_categories\n category.uuid == datatype.category_uuid\n category.group_uuid == sensitive_data_group_uuid\n\n item = {\n \"category_group\": category.group_name,\n \"filename\": location.filename,\n \"line_number\": location.line_number,\n \"parent_line_number\": detector.parent.line_number,\n \"parent_content\": detector.parent.content\n\n }\n}\n\ncritical[item] {\n some datatype in input.dataflow.data_types \n some detector in datatype.detectors\n detector.name == input.policy_id\n \n some location in detector.locations\n not location.encrypted\n\n some category in input.data_categories\n category.uuid == datatype.category_uuid\n category.group_uuid == personal_data_group_uuid\n\n item = {\n \"category_group\": category.group_name,\n \"filename\": location.filename,\n \"line_number\": location.line_number,\n \"parent_line_number\": detector.parent.line_number,\n \"parent_content\": detector.parent.content\n }\n}" + http_get_parameters: + query: | + critical = data.bearer.http_get_parameters.critical + high = data.bearer.http_get_parameters.high + id: ruby_http_get_detection + name: HTTP GET parameters + description: Sending data as HTTP GET parameters + level: "" + modules: + - path: policies/http_get_parameters.rego + name: bearer.http_get_parameters + content: | + package bearer.http_get_parameters + + import future.keywords + + sensitive_data_group_uuid := "f6a0c071-5908-4420-bac2-bba28d41223e" + personal_data_group_uuid := "e1d3135b-3c0f-4b55-abce-19f27a26cbb3" + + item_in_data_category contains [category_group_uuid, item] if { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + data_type = detector.data_types[_] + + some category in input.data_categories + category.uuid == data_type.category_uuid + category_group_uuid := category.group_uuid + + location = data_type.locations[_] + item := { + "category_group": category.group_name, + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": data_type.parent.line_number, + "parent_content": data_type.parent.content + } + } + + high[item] { + item_in_data_category[[sensitive_data_group_uuid, item]] + } + + critical[item] { + item_in_data_category[[personal_data_group_uuid, item]] + } + insecure_smtp_processing_sensitive_data: + query: | + medium = data.bearer.insecure_smtp.medium + id: detect_rails_insecure_smtp + name: Insecure SMTP + description: Communication with insecure SMTP in an application processing sensitive data + level: "" + modules: + - path: policies/insecure_smtp.rego + name: bearer.insecure_smtp + content: | + package bearer.insecure_smtp + + import future.keywords + + medium[item] { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + location = detector.locations[_] + item := { + "category_group": "Insecure communication", + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": location.line_number, + "parent_content": location.content + } + } + 
jwt_leaks: + query: | + critical = data.bearer.leakage.critical + high = data.bearer.leakage.high + id: detect_rails_jwt + name: JWT leaking + description: JWT leaks detected + level: "" + modules: + - path: policies/leakage.rego + name: bearer.leakage + content: | + package bearer.leakage + + import future.keywords + + sensitive_data_group_uuid := "f6a0c071-5908-4420-bac2-bba28d41223e" + personal_data_group_uuid := "e1d3135b-3c0f-4b55-abce-19f27a26cbb3" + + high[item] { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + data_type = detector.data_types[_] + + some category in input.data_categories + category.uuid == data_type.category_uuid + category.group_uuid == sensitive_data_group_uuid + + location = data_type.locations[_] + item := { + "category_group": category.group_name, + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": location.parent.line_number, + "parent_content": location.parent.content + } + } + + critical[item] { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + data_type = detector.data_types[_] + + some category in input.data_categories + category.uuid == data_type.category_uuid + category.group_uuid == personal_data_group_uuid + + location = data_type.locations[_] + item := { + "category_group": category.group_name, + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": location.parent.line_number, + "parent_content": location.parent.content + } + } + logger_leaks: + query: | + critical = data.bearer.leakage.critical + high = data.bearer.leakage.high + id: detect_ruby_logger + name: Logger leaking + description: Logger leaks detected + level: "" + modules: + - path: policies/leakage.rego + name: bearer.leakage + content: | + package bearer.leakage + + import future.keywords + + sensitive_data_group_uuid := "f6a0c071-5908-4420-bac2-bba28d41223e" + personal_data_group_uuid := "e1d3135b-3c0f-4b55-abce-19f27a26cbb3" + + high[item] { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + data_type = detector.data_types[_] + + some category in input.data_categories + category.uuid == data_type.category_uuid + category.group_uuid == sensitive_data_group_uuid + + location = data_type.locations[_] + item := { + "category_group": category.group_name, + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": location.parent.line_number, + "parent_content": location.parent.content + } + } + + critical[item] { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + data_type = detector.data_types[_] + + some category in input.data_categories + category.uuid == data_type.category_uuid + category.group_uuid == personal_data_group_uuid + + location = data_type.locations[_] + item := { + "category_group": category.group_name, + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": location.parent.line_number, + "parent_content": location.parent.content + } + } + session_leaks: + query: | + critical = data.bearer.leakage.critical + high = data.bearer.leakage.high + id: detect_rails_session + name: Session leaking + description: Session leaks detected + level: "" + modules: + - path: policies/leakage.rego + name: bearer.leakage + content: | + package bearer.leakage + + import future.keywords + + sensitive_data_group_uuid := "f6a0c071-5908-4420-bac2-bba28d41223e" + personal_data_group_uuid := 
"e1d3135b-3c0f-4b55-abce-19f27a26cbb3" + + high[item] { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + data_type = detector.data_types[_] + + some category in input.data_categories + category.uuid == data_type.category_uuid + category.group_uuid == sensitive_data_group_uuid + + location = data_type.locations[_] + item := { + "category_group": category.group_name, + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": location.parent.line_number, + "parent_content": location.parent.content + } + } + + critical[item] { + some detector in input.dataflow.risks + detector.detector_id == input.policy_id + + data_type = detector.data_types[_] + + some category in input.data_categories + category.uuid == data_type.category_uuid + category.group_uuid == personal_data_group_uuid + + location = data_type.locations[_] + item := { + "category_group": category.group_name, + "filename": location.filename, + "line_number": location.line_number, + "parent_line_number": location.parent.line_number, + "parent_content": location.parent.content + } + } quiet: false skip-path: [] worker: diff --git a/pkg/commands/init.go b/pkg/commands/init.go index 1348d96aa..676781dc0 100644 --- a/pkg/commands/init.go +++ b/pkg/commands/init.go @@ -27,6 +27,7 @@ func NewInitCommand() *cobra.Command { return err } viper.Set(settings.CustomDetectorKey, globalSettings.CustomDetector) + viper.Set(settings.PoliciesKey, globalSettings.Policies) viper.SetConfigFile("./curio.yml") err = viper.WriteConfig() diff --git a/pkg/commands/process/settings/settings.go b/pkg/commands/process/settings/settings.go index 38a7f1d96..262759013 100644 --- a/pkg/commands/process/settings/settings.go +++ b/pkg/commands/process/settings/settings.go @@ -13,12 +13,12 @@ import ( ) type Config struct { - Worker flag.WorkerOptions `json:"worker" yaml:"worker"` - Scan flag.ScanOptions `json:"scan" yaml:"scan"` - Report flag.ReportOptions `json:"report" yaml:"report"` - CustomDetector map[string]Rule `json:"custom_detector" yaml:"custom_detector"` - Policies map[string]*Policy `json:"policies" yaml:"policies"` - Target string `json:"target" yaml:"target"` + Worker flag.WorkerOptions `mapstructure:"worker" json:"worker" yaml:"worker"` + Scan flag.ScanOptions `mapstructure:"scan" json:"scan" yaml:"scan"` + Report flag.ReportOptions `mapstructure:"report" json:"report" yaml:"report"` + CustomDetector map[string]Rule `mapstructure:"custom_detector" json:"custom_detector" yaml:"custom_detector"` + Policies map[string]*Policy `mapstructure:"policies" json:"policies" yaml:"policies"` + Target string `mapstructure:"target" json:"target" yaml:"target"` } type PolicyLevel string @@ -31,18 +31,18 @@ var LevelLow = "low" type Modules []*PolicyModule type Policy struct { - Query string - Id string - Name string - Description string - Level PolicyLevel - Modules Modules + Query string `mapstructure:"query" json:"query" yaml:"query"` + Id string `mapstructure:"id" json:"id" yaml:"id"` + Name string `mapstructure:"name" json:"name" yaml:"name"` + Description string `mapstructure:"description" json:"description" yaml:"description"` + Level PolicyLevel `mapstructure:"level" json:"level" yaml:"level"` + Modules Modules `mapstructure:"modules" json:"modules" yaml:"modules"` } type PolicyModule struct { - Path string `yaml:"path,omitempty"` - Name string - Content string + Path string `mapstructure:"path" json:"path,omitempty" yaml:"path,omitempty"` + Name string `mapstructure:"name" json:"name" 
yaml:"name"` + Content string `mapstructure:"content" json:"content" yaml:"content"` } func (modules Modules) ToRegoModules() (output []rego.Module) { @@ -66,30 +66,30 @@ type RulePattern struct { } type Rule struct { - Disabled bool - Type string - Languages []string - Patterns []RulePattern - ParamParenting bool `yaml:"param_parenting"` - Processors []Processor - - RootSingularize bool `yaml:"root_singularize"` - RootLowercase bool `yaml:"root_lowercase"` - - Metavars map[string]MetaVar - Stored bool - DetectPresence bool `yaml:"detect_presence"` + Disabled bool `mapstructure:"disabled" json:"disabled" yaml:"disabled"` + Type string `mapstructure:"type" json:"type" yaml:"type"` + Languages []string `mapstructure:"languages" json:"languages" yaml:"languages"` + ParamParenting bool `mapstructure:"param_parenting" json:"param_parenting" yaml:"param_parenting"` + Processors []Processor `mapstructure:"processors" json:"processors" yaml:"processors"` + Patterns []RulePattern `mapstructure:"patterns" json:"patterns" yaml:"patterns"` + + RootSingularize bool `mapstructure:"root_singularize" yaml:"root_singularize" ` + RootLowercase bool `mapstructure:"root_lowercase" yaml:"root_lowercase"` + + Metavars map[string]MetaVar `mapstructure:"metavars" json:"metavars" yaml:"metavars"` + Stored bool `mapstructure:"stored" json:"stored" yaml:"stored"` + DetectPresence bool `mapstructure:"detect_presence" json:"detect_presence" yaml:"detect_presence"` } type Processor struct { - Query string - Modules Modules + Query string `mapstructure:"query" json:"query" yaml:"query"` + Modules Modules `mapstructure:"modules" json:"modules" yaml:"modules"` } type MetaVar struct { - Input string - Output int - Regex string + Input string `mapstructure:"input" json:"input" yaml:"input"` + Output int `mapstructure:"output" json:"output" yaml:"output"` + Regex string `mapstructure:"regex" json:"regex" yaml:"regex"` } //go:embed custom_detector.yml @@ -108,12 +108,14 @@ var CustomDetectorKey string = "scan.custom_detector" var PoliciesKey string = "scan.policies" func FromOptions(opts flag.Options) (Config, error) { - rules := DefaultCustomDetector() + var rules map[string]Rule if viper.IsSet(CustomDetectorKey) { err := viper.UnmarshalKey(CustomDetectorKey, &rules) if err != nil { return Config{}, err } + } else { + rules = DefaultCustomDetector() } for _, customDetector := range rules { @@ -131,12 +133,14 @@ func FromOptions(opts flag.Options) (Config, error) { } } - policies := DefaultPolicies() + var policies map[string]*Policy if viper.IsSet(PoliciesKey) { err := viper.UnmarshalKey(PoliciesKey, &policies) if err != nil { return Config{}, err } + } else { + policies = DefaultPolicies() } for key := range policies { diff --git a/pkg/commands/scan.go b/pkg/commands/scan.go index f3e8ab27c..cfbe87c08 100644 --- a/pkg/commands/scan.go +++ b/pkg/commands/scan.go @@ -89,8 +89,8 @@ func NewScanCommand() *cobra.Command { } func readConfig(configFile string) error { - viper.SetConfigFile(configFile) viper.SetConfigType("yaml") + viper.SetConfigFile(configFile) if err := viper.ReadInConfig(); err != nil { if _, ok := err.(viper.ConfigFileNotFoundError); ok { return nil diff --git a/pkg/flag/policy_flags.go b/pkg/flag/policy_flags.go index 19e8afdb0..375454c7b 100644 --- a/pkg/flag/policy_flags.go +++ b/pkg/flag/policy_flags.go @@ -21,8 +21,8 @@ type PolicyFlagGroup struct { } type PolicyOptions struct { - SkipPolicy map[string]bool `json:"skip_policy" yaml:"skip_policy"` - OnlyPolicy map[string]bool `json:"only_policy" 
yaml:"only_policy"` + SkipPolicy map[string]bool `mapstructure:"skip-policy" json:"skip-policy" yaml:"skip-policy"` + OnlyPolicy map[string]bool `mapstructure:"only-policy" json:"only-policy" yaml:"only-policy"` } func NewPolicyFlagGroup() *PolicyFlagGroup { diff --git a/pkg/flag/process_flags.go b/pkg/flag/process_flags.go index 75b6f5025..ffd441136 100644 --- a/pkg/flag/process_flags.go +++ b/pkg/flag/process_flags.go @@ -15,7 +15,7 @@ type ProcessFlagGroup struct { } type ProcessOptions struct { - Port string + Port string `mapstructure:"port" json:"port" yaml:"port"` } func NewProcessGroup() *ProcessFlagGroup { diff --git a/pkg/flag/report_flags.go b/pkg/flag/report_flags.go index 154e18569..59fcbc97d 100644 --- a/pkg/flag/report_flags.go +++ b/pkg/flag/report_flags.go @@ -41,9 +41,9 @@ type ReportFlagGroup struct { } type ReportOptions struct { - Format string - Report string - Output string + Format string `mapstructure:"format" json:"format" yaml:"format"` + Report string `mapstructure:"report" json:"report" yaml:"report"` + Output string `mapstructure:"output" json:"output" yaml:"output"` } func NewReportFlagGroup() *ReportFlagGroup { diff --git a/pkg/flag/repository_flags.go b/pkg/flag/repository_flags.go index dfbffa8e6..9cb793161 100644 --- a/pkg/flag/repository_flags.go +++ b/pkg/flag/repository_flags.go @@ -28,9 +28,9 @@ type RepoFlagGroup struct { } type RepoOptions struct { - RepoBranch string - RepoCommit string - RepoTag string + RepoBranch string `mapstructure:"branch" json:"branch" yaml:"branch"` + RepoCommit string `mapstructure:"commit" json:"commit" yaml:"commit"` + RepoTag string `mapstructure:"tag" json:"tag" yaml:"tag"` } func NewRepoFlagGroup() *RepoFlagGroup { diff --git a/pkg/flag/scan_flags.go b/pkg/flag/scan_flags.go index 735bc3ff8..df3e5f6e3 100644 --- a/pkg/flag/scan_flags.go +++ b/pkg/flag/scan_flags.go @@ -67,14 +67,14 @@ type ScanFlagGroup struct { } type ScanOptions struct { - Target string `json:"target" yaml:"target"` - SkipPath []string `json:"skip_path" yaml:"skip_path"` - Debug bool `json:"debug" yaml:"debug"` - DisableDomainResolution bool `json:"disable_domain_resolution" yaml:"disable_domain_resolution"` - DomainResolutionTimeout time.Duration `json:"domain_resolution_timeout" yaml:"domain_resolution_timeout"` - InternalDomains []string `json:"internal_domains" yaml:"internal_domains"` - Context Context `json:"context" yaml:"context"` - Quiet bool `json:"quiet" yaml:"quiet"` + Target string `mapstructure:"target" json:"target" yaml:"target"` + SkipPath []string `mapstructure:"skip-path" json:"skip-path" yaml:"skip-path"` + Debug bool `mapstructure:"debug" json:"debug" yaml:"debug"` + DisableDomainResolution bool `mapstructure:"disable-domain-resolution" json:"disable-domain-resolution" yaml:"disable-domain-resolution"` + DomainResolutionTimeout time.Duration `mapstructure:"domain-resolution-timeout" json:"domain-resolution-timeout" yaml:"domain-resolution-timeout"` + InternalDomains []string `mapstructure:"internal-domains" json:"internal-domains" yaml:"internal-domains"` + Context Context `mapstructure:"context" json:"context" yaml:"context"` + Quiet bool `mapstructure:"quiet" json:"quiet" yaml:"quiet"` } func NewScanFlagGroup() *ScanFlagGroup { diff --git a/pkg/flag/worker_flags.go b/pkg/flag/worker_flags.go index e5dd6fcda..4578b5319 100644 --- a/pkg/flag/worker_flags.go +++ b/pkg/flag/worker_flags.go @@ -82,16 +82,16 @@ type WorkerFlagGroup struct { // GlobalOptions defines flags and other configuration parameters for all the subcommands 
type WorkerOptions struct { - Workers int `json:"workers" yaml:"workers"` - Timeout time.Duration `json:"timeout" yaml:"timeout"` - TimeoutFileMinimum time.Duration `json:"timeout_file_minimum" yaml:"timeout_file_minimum"` - TimeoutFileMaximum time.Duration `json:"timeout_file_maximum" yaml:"timeout_file_maximum"` - TimeoutFileSecondPerBytes int `json:"timeout_file_second_per_bytes" yaml:"timeout_file_second_per_bytes"` - TimeoutWorkerOnline time.Duration `json:"timeout_worker_online" yaml:"timeout_worker_online"` - FileSizeMaximum int `json:"file_size_maximum" yaml:"file_size_maximum"` - FilesToBatch int `json:"files_to_batch" yaml:"files_to_batch"` - MemoryMaximum int `json:"memory_maximum" yaml:"memory_maximum"` - ExistingWorker string `json:"existing_worker" yaml:"existing_worker"` + Workers int `mapstructure:"workers" json:"workers" yaml:"workers"` + Timeout time.Duration `mapstructure:"timeout" json:"timeout" yaml:"timeout"` + TimeoutFileMinimum time.Duration `mapstructure:"timeout-file-min" json:"timeout-file-min" yaml:"timeout-file-min"` + TimeoutFileMaximum time.Duration `mapstructure:"timeout-file-max" json:"timeout-file-max" yaml:"timeout-file-max"` + TimeoutFileSecondPerBytes int `mapstructure:"timeout-file-second-per-bytes" json:"timeout-file-second-per-bytes" yaml:"timeout-file-second-per-bytes"` + TimeoutWorkerOnline time.Duration `mapstructure:"timeout-worker-online" json:"timeout-worker-online" yaml:"timeout-worker-online"` + FileSizeMaximum int `mapstructure:"file-size-max" json:"file-size-max" yaml:"file-size-max"` + FilesToBatch int `mapstructure:"files-to-batch" json:"files-to-batch" yaml:"files-to-batch"` + MemoryMaximum int `mapstructure:"memory-max" json:"memory-max" yaml:"memory-max"` + ExistingWorker string `mapstructure:"existing-worker" json:"existing-worker" yaml:"existing-worker"` } func NewWorkerFlagGroup() *WorkerFlagGroup {
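
Editor's note (not part of the patch): the recurring change above is adding `mapstructure` tags alongside the existing `json`/`yaml` tags. Viper decodes configuration values through mapstructure, so when `FromOptions` calls `viper.UnmarshalKey` on the `scan.custom_detector` or `scan.policies` keys written into `./curio.yml` by the init command, only struct fields with a matching `mapstructure` tag receive values; untagged fields whose names differ from the YAML keys silently keep their zero values. The sketch below is a minimal, standalone illustration of that mechanism under those assumptions — the `Rule` shape here just mirrors a couple of fields from the diff and is not the project's real type.

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

// Illustrative stand-in for settings.Rule: without the mapstructure tags,
// the YAML key "param_parenting" would not map onto ParamParenting and the
// field would decode as false for every rule.
type Rule struct {
	ParamParenting bool `mapstructure:"param_parenting"`
	Stored         bool `mapstructure:"stored"`
}

func main() {
	// Same ordering as the patched readConfig: declare the type, then the file.
	viper.SetConfigType("yaml")
	viper.SetConfigFile("./curio.yml") // file produced by the init command

	if err := viper.ReadInConfig(); err != nil {
		panic(err)
	}

	// Mirrors the patched FromOptions: only fall back to defaults when the
	// key is absent from the config file.
	var rules map[string]Rule
	if viper.IsSet("scan.custom_detector") {
		if err := viper.UnmarshalKey("scan.custom_detector", &rules); err != nil {
			panic(err)
		}
	}

	fmt.Printf("%+v\n", rules)
}

With the tags in place, a round trip works end to end: `viper.Set` during init serializes the default rules and policies into `curio.yml`, and a later scan run unmarshals them back into the same structs instead of silently producing empty values.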