Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: some config fields are not loading #185

Merged
merged 17 commits into from
Dec 2, 2022
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
215 changes: 215 additions & 0 deletions integration/flags/.snapshots/TestInitCommand-init
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,221 @@ scan:
disable-domain-resolution: false
domain-resolution-timeout: 3s
internal-domains: []
policies:
application_level_encryption_missing:
query: |
critical = data.bearer.application_level_encryption.critical
high = data.bearer.application_level_encryption.high
id: detect_sql_create_public_table
name: Application level encryption missing
description: Application level encryption missing
level: ""
modules:
- path: policies/application_level_encryption.rego
name: bearer.application_level_encryption
content: "package bearer.application_level_encryption\n\nimport future.keywords\n\nsensitive_data_group_uuid := \"f6a0c071-5908-4420-bac2-bba28d41223e\"\npersonal_data_group_uuid := \"e1d3135b-3c0f-4b55-abce-19f27a26cbb3\"\n\nhigh[item] {\n some datatype in input.dataflow.data_types \n some detector in datatype.detectors\n detector.name == input.policy_id\n \n some location in detector.locations\n not location.encrypted\n\n some category in input.data_categories\n category.uuid == datatype.category_uuid\n category.group_uuid == sensitive_data_group_uuid\n\n item = {\n \"category_group\": category.group_name,\n \"filename\": location.filename,\n \"line_number\": location.line_number,\n \"parent_line_number\": detector.parent.line_number,\n \"parent_content\": detector.parent.content\n\n }\n}\n\ncritical[item] {\n some datatype in input.dataflow.data_types \n some detector in datatype.detectors\n detector.name == input.policy_id\n \n some location in detector.locations\n not location.encrypted\n\n some category in input.data_categories\n category.uuid == datatype.category_uuid\n category.group_uuid == personal_data_group_uuid\n\n item = {\n \"category_group\": category.group_name,\n \"filename\": location.filename,\n \"line_number\": location.line_number,\n \"parent_line_number\": detector.parent.line_number,\n \"parent_content\": detector.parent.content\n }\n}"
insecure_smtp_processing_sensitive_data:
query: |
medium = data.bearer.insecure_smtp.medium
id: detect_rails_insecure_smtp
name: Insecure SMTP
description: Communication with insecure SMTP in an application processing sensitive data
level: ""
modules:
- path: policies/insecure_smtp.rego
name: bearer.insecure_smtp
content: |
package bearer.insecure_smtp

import future.keywords

medium[item] {
some detector in input.dataflow.risks
detector.detector_id == input.policy_id

location = detector.locations[_]
item := {
"category_group": "Insecure communication",
"filename": location.filename,
"line_number": location.line_number,
"parent_line_number": location.line_number,
"parent_content": location.content
}
}
jwt_leaks:
query: |
critical = data.bearer.leakage.critical
high = data.bearer.leakage.high
id: detect_rails_jwt
name: JWT leaking
description: JWT leaks detected
level: ""
modules:
- path: policies/leakage.rego
name: bearer.leakage
content: |
package bearer.leakage

import future.keywords

sensitive_data_group_uuid := "f6a0c071-5908-4420-bac2-bba28d41223e"
personal_data_group_uuid := "e1d3135b-3c0f-4b55-abce-19f27a26cbb3"

high[item] {
some detector in input.dataflow.risks
detector.detector_id == input.policy_id

data_type = detector.data_types[_]

some category in input.data_categories
category.uuid == data_type.category_uuid
category.group_uuid == sensitive_data_group_uuid

location = data_type.locations[_]
item := {
"category_group": category.group_name,
"filename": location.filename,
"line_number": location.line_number,
"parent_line_number": data_type.parent.line_number,
"parent_content": data_type.parent.content
}
}

critical[item] {
some detector in input.dataflow.risks
detector.detector_id == input.policy_id

data_type = detector.data_types[_]

some category in input.data_categories
category.uuid == data_type.category_uuid
category.group_uuid == personal_data_group_uuid

location = data_type.locations[_]
item := {
"category_group": category.group_name,
"filename": location.filename,
"line_number": location.line_number,
"parent_line_number": data_type.parent.line_number,
"parent_content": data_type.parent.content
}
}
logger_leaks:
query: |
critical = data.bearer.leakage.critical
high = data.bearer.leakage.high
id: detect_ruby_logger
name: Logger leaking
description: Logger leaks detected
level: ""
modules:
- path: policies/leakage.rego
name: bearer.leakage
content: |
package bearer.leakage

import future.keywords

sensitive_data_group_uuid := "f6a0c071-5908-4420-bac2-bba28d41223e"
personal_data_group_uuid := "e1d3135b-3c0f-4b55-abce-19f27a26cbb3"

high[item] {
some detector in input.dataflow.risks
detector.detector_id == input.policy_id

data_type = detector.data_types[_]

some category in input.data_categories
category.uuid == data_type.category_uuid
category.group_uuid == sensitive_data_group_uuid

location = data_type.locations[_]
item := {
"category_group": category.group_name,
"filename": location.filename,
"line_number": location.line_number,
"parent_line_number": data_type.parent.line_number,
"parent_content": data_type.parent.content
}
}

critical[item] {
some detector in input.dataflow.risks
detector.detector_id == input.policy_id

data_type = detector.data_types[_]

some category in input.data_categories
category.uuid == data_type.category_uuid
category.group_uuid == personal_data_group_uuid

location = data_type.locations[_]
item := {
"category_group": category.group_name,
"filename": location.filename,
"line_number": location.line_number,
"parent_line_number": data_type.parent.line_number,
"parent_content": data_type.parent.content
}
}
session_leaks:
query: |
critical = data.bearer.leakage.critical
high = data.bearer.leakage.high
id: detect_rails_session
name: Session leaking
description: Session leaks detected
level: ""
modules:
- path: policies/leakage.rego
name: bearer.leakage
content: |
package bearer.leakage

import future.keywords

sensitive_data_group_uuid := "f6a0c071-5908-4420-bac2-bba28d41223e"
personal_data_group_uuid := "e1d3135b-3c0f-4b55-abce-19f27a26cbb3"

high[item] {
some detector in input.dataflow.risks
detector.detector_id == input.policy_id

data_type = detector.data_types[_]

some category in input.data_categories
category.uuid == data_type.category_uuid
category.group_uuid == sensitive_data_group_uuid

location = data_type.locations[_]
item := {
"category_group": category.group_name,
"filename": location.filename,
"line_number": location.line_number,
"parent_line_number": data_type.parent.line_number,
"parent_content": data_type.parent.content
}
}

critical[item] {
some detector in input.dataflow.risks
detector.detector_id == input.policy_id

data_type = detector.data_types[_]

some category in input.data_categories
category.uuid == data_type.category_uuid
category.group_uuid == personal_data_group_uuid

location = data_type.locations[_]
item := {
"category_group": category.group_name,
"filename": location.filename,
"line_number": location.line_number,
"parent_line_number": data_type.parent.line_number,
"parent_content": data_type.parent.content
}
}
quiet: false
skip-path: []
worker:
Expand Down
1 change: 1 addition & 0 deletions pkg/commands/init.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ func NewInitCommand() *cobra.Command {
return err
}
viper.Set(settings.CustomDetectorKey, globalSettings.CustomDetector)
viper.Set(settings.PoliciesKey, globalSettings.Policies)

viper.SetConfigFile("./curio.yml")
err = viper.WriteConfig()
Expand Down
64 changes: 34 additions & 30 deletions pkg/commands/process/settings/settings.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ type Config struct {
Worker flag.WorkerOptions `json:"worker" yaml:"worker"`
Scan flag.ScanOptions `json:"scan" yaml:"scan"`
Report flag.ReportOptions `json:"report" yaml:"report"`
CustomDetector map[string]Rule `json:"custom_detector" yaml:"custom_detector"`
CustomDetector map[string]Rule `mapstructure:"custom_detector" json:"custom_detector" yaml:"custom_detector"`
cfabianski marked this conversation as resolved.
Show resolved Hide resolved
Policies map[string]*Policy `json:"policies" yaml:"policies"`
Target string `json:"target" yaml:"target"`
}
Expand All @@ -31,18 +31,18 @@ var LevelLow = "low"
type Modules []*PolicyModule

// Policy describes one policy loaded from configuration: the Rego query
// that evaluates it, identifying metadata, and the Rego modules that back
// the query. Tags cover mapstructure (viper.UnmarshalKey), json, and yaml
// so the struct round-trips through config loading and serialization with
// consistent key names.
type Policy struct {
	Query       string      `mapstructure:"query" json:"query" yaml:"query"`
	Id          string      `mapstructure:"id" json:"id" yaml:"id"`
	Name        string      `mapstructure:"name" json:"name" yaml:"name"`
	Description string      `mapstructure:"description" json:"description" yaml:"description"`
	Level       PolicyLevel `mapstructure:"level" json:"level" yaml:"level"`
	Modules     Modules     `mapstructure:"modules" json:"modules" yaml:"modules"`
}

type PolicyModule struct {
Path string `yaml:"path,omitempty"`
Name string
Content string
Path string `mapstructure:"path" json:"path,omitempty" yaml:"path,omitempty"`
Name string `mapstructure:"name" json:"name" yaml:"name"`
Content string `mapstructure:"content" json:"content" yaml:"content"`
}

func (modules Modules) ToRegoModules() (output []rego.Module) {
Expand All @@ -56,30 +56,30 @@ func (modules Modules) ToRegoModules() (output []rego.Module) {
}

// Rule is a custom detector definition loaded from configuration
// (scan.custom_detector). Tags cover mapstructure (viper.UnmarshalKey),
// json, and yaml so every field loads from curio.yml and serializes back
// under the same snake_case keys.
type Rule struct {
	Disabled       bool        `mapstructure:"disabled" json:"disabled" yaml:"disabled"`
	Type           string      `mapstructure:"type" json:"type" yaml:"type"`
	Languages      []string    `mapstructure:"languages" json:"languages" yaml:"languages"`
	Patterns       []string    `mapstructure:"patterns" json:"patterns" yaml:"patterns"`
	ParamParenting bool        `mapstructure:"param_parenting" json:"param_parenting" yaml:"param_parenting"`
	Processors     []Processor `mapstructure:"processors" json:"processors" yaml:"processors"`

	// json tags added for consistency with the other fields; the original
	// added only mapstructure/yaml here (and left a stray trailing space
	// inside the RootSingularize tag).
	RootSingularize bool `mapstructure:"root_singularize" json:"root_singularize" yaml:"root_singularize"`
	RootLowercase   bool `mapstructure:"root_lowercase" json:"root_lowercase" yaml:"root_lowercase"`

	Metavars       map[string]MetaVar `mapstructure:"metavars" json:"metavars" yaml:"metavars"`
	Stored         bool               `mapstructure:"stored" json:"stored" yaml:"stored"`
	DetectPresence bool               `mapstructure:"detect_presence" json:"detect_presence" yaml:"detect_presence"`
}

// Processor is a post-detection processing step for a custom detector:
// a Rego query plus the modules it evaluates against.
type Processor struct {
	Query   string  `mapstructure:"query" json:"query" yaml:"query"`
	Modules Modules `mapstructure:"modules" json:"modules" yaml:"modules"`
}

type MetaVar struct {
Input string
Output int
Regex string
Input string `mapstructure:"input" json:"input" yaml:"input"`
Output int `mapstructure:"output" json:"output" yaml:"output"`
Regex string `mapstructure:"regex" json:"regex" yaml:"regex"`
}

//go:embed custom_detector.yml
Expand All @@ -98,12 +98,14 @@ var CustomDetectorKey string = "scan.custom_detector"
var PoliciesKey string = "scan.policies"

func FromOptions(opts flag.Options) (Config, error) {
rules := DefaultCustomDetector()
var rules map[string]Rule
if viper.IsSet(CustomDetectorKey) {
err := viper.UnmarshalKey(CustomDetectorKey, &rules)
if err != nil {
return Config{}, err
}
} else {
rules = DefaultCustomDetector()
}

for _, customDetector := range rules {
Expand All @@ -121,12 +123,14 @@ func FromOptions(opts flag.Options) (Config, error) {
}
}

policies := DefaultPolicies()
var policies map[string]*Policy
if viper.IsSet(PoliciesKey) {
err := viper.UnmarshalKey(PoliciesKey, &policies)
if err != nil {
return Config{}, err
}
} else {
policies = DefaultPolicies()
}

for key := range policies {
Expand Down
2 changes: 1 addition & 1 deletion pkg/commands/scan.go
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,8 @@ func NewScanCommand() *cobra.Command {
}

func readConfig(configFile string) error {
viper.SetConfigFile(configFile)
viper.SetConfigType("yaml")
viper.SetConfigFile(configFile)
if err := viper.ReadInConfig(); err != nil {
if _, ok := err.(viper.ConfigFileNotFoundError); ok {
return nil
Expand Down
4 changes: 2 additions & 2 deletions pkg/flag/policy_flags.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ type PolicyFlagGroup struct {
}

type PolicyOptions struct {
SkipPolicy map[string]bool `json:"skip_policy" yaml:"skip_policy"`
OnlyPolicy map[string]bool `json:"only_policy" yaml:"only_policy"`
SkipPolicy map[string]bool `mapstructure:"skip-policy" json:"skip-policy" yaml:"skip-policy"`
OnlyPolicy map[string]bool `mapstructure:"only-policy" json:"only-policy" yaml:"only-policy"`
}

func NewPolicyFlagGroup() *PolicyFlagGroup {
Expand Down
2 changes: 1 addition & 1 deletion pkg/flag/process_flags.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ type ProcessFlagGroup struct {
}

type ProcessOptions struct {
Port string
Port string `mapstructure:"port" json:"port" yaml:"port"`
}

func NewProcessGroup() *ProcessFlagGroup {
Expand Down
6 changes: 3 additions & 3 deletions pkg/flag/report_flags.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,9 @@ type ReportFlagGroup struct {
}

type ReportOptions struct {
Format string
Report string
Output string
Format string `mapstructure:"format" json:"format" yaml:"format"`
Report string `mapstructure:"report" json:"report" yaml:"report"`
Output string `mapstructure:"output" json:"output" yaml:"output"`
}

func NewReportFlagGroup() *ReportFlagGroup {
Expand Down