
Commit

Biases entries
TimmyBugcrowd committed Jun 24, 2024
1 parent c39d933 commit 2240e15
Showing 3 changed files with 221 additions and 0 deletions.
57 changes: 57 additions & 0 deletions mappings/cvss_v3/cvss_v3.json
@@ -27,6 +27,63 @@
}
]
},
{
"id": "data_biases",
"children": [
{
"id": "representation_bias",
"cvss_v3": "AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:H/A:N"
},
{
"id": "pre_existing_bias",
"cvss_v3": "AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:H/A:N"
}
]
},
{
"id": "algorithmic_biases",
"children": [
{
"id": "processing_bias",
"cvss_v3": "AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:H/A:N"
},
{
"id": "aggregation_bias",
"cvss_v3": "AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:H/A:N"
}
]
},
{
"id": "societal_biases",
"children": [
{
"id": "confirmation_bias",
"cvss_v3": "AV:N/AC:L/PR:N/UI:R/S:U/C:L/I:L/A:N"
},
{
"id": "systemic_bias",
"cvss_v3": "AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:H/A:N"
}
]
},
{
"id": "misinterpretation_biases",
"children": [
{
"id": "context_ignorance",
"cvss_v3": "AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N"
}
]
},
{
"id": "developer_biases",
"children": [
{
"id": "implicit_bias",
"cvss_v3": "AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:M/A:N"
}
]
},
{
"id": "unsafe_cross_origin_resource_sharing",
"cvss_v3": "AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:L/A:N"
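The `cvss_v3` values added above are CVSS v3.x base vector strings in the standard `Metric:Value` format. A minimal validation sketch is shown below; the allowed-value tables follow the CVSS v3.1 specification, and the `parse_cvss_vector` helper name is illustrative rather than part of this repository:

```python
# Minimal sketch: parse and validate a CVSS v3.x base vector string.
# Allowed values follow the CVSS v3.1 specification; helper names are illustrative.
ALLOWED = {
    "AV": {"N", "A", "L", "P"},   # Attack Vector
    "AC": {"L", "H"},             # Attack Complexity
    "PR": {"N", "L", "H"},        # Privileges Required
    "UI": {"N", "R"},             # User Interaction
    "S":  {"U", "C"},             # Scope
    "C":  {"N", "L", "H"},        # Confidentiality impact
    "I":  {"N", "L", "H"},        # Integrity impact
    "A":  {"N", "L", "H"},        # Availability impact
}

def parse_cvss_vector(vector: str) -> dict:
    """Split 'AV:N/AC:L/...' into a dict, rejecting unknown metrics or values."""
    metrics = {}
    for part in vector.split("/"):
        metric, _, value = part.partition(":")
        if metric not in ALLOWED:
            raise ValueError(f"unknown metric {metric!r}")
        if value not in ALLOWED[metric]:
            raise ValueError(f"invalid value {value!r} for metric {metric}")
        metrics[metric] = value
    missing = set(ALLOWED) - set(metrics)
    if missing:
        raise ValueError(f"missing base metrics: {sorted(missing)}")
    return metrics

print(parse_cvss_vector("AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:H/A:N"))
```

Running every `cvss_v3` value in the mapping through a check like this would flag any impact metric that falls outside the N/L/H set before it lands in the file.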
81 changes: 81 additions & 0 deletions mappings/remediation_advice/remediation_advice.json
@@ -37,6 +37,87 @@
"https://cheatsheetseries.owasp.org/cheatsheets/HTTP_Request_Smuggling_Prevention_Cheat_Sheet.html"
]
},
{
"id": "data_biases",
"children": [
{
"id": "representation_bias",
"remediation_advice": "Diversify data sources and ensure the dataset represents all relevant groups proportionally to prevent skewing model outcomes.",
"references": [
"https://fairmlbook.org"
]
},
{
"id": "pre_existing_bias",
"remediation_advice": "Identify and correct biases in the data collection phase, and review historical data for any inherent biases that could affect outcomes.",
"references": [
"https://ai.google/responsibilities/responsible-ai-practices/"
]
}
]
},
{
"id": "algorithmic_biases",
"children": [
{
"id": "processing_bias",
"remediation_advice": "Design algorithms to process all data fairly, implementing checks to detect and mitigate biases that arise during data processing.",
"references": [
"https://paperswithcode.com/task/fairness-in-ml"
]
},
{
"id": "aggregation_bias",
"remediation_advice": "Ensure that data aggregation methods maintain the integrity of subgroups, preventing dilution of minority data.",
"references": [
"https://www.microsoft.com/en-us/ai/responsible-ai-resources"
]
}
]
},
{
"id": "societal_biases",
"children": [
{
"id": "confirmation_bias",
"remediation_advice": "Promote a culture of critical thinking and questioning in analysis to avoid confirming pre-existing beliefs or hypotheses.",
"references": [
"https://hbr.org/2019/05/the-problem-with-confirmation-bias"
]
},
{
"id": "systemic_bias",
"remediation_advice": "Review and reform policies and practices that lead to unfair advantages or disadvantages for specific groups across systems.",
"references": [
"https://www.brookings.edu/research/how-to-combat-systemic-bias/"
]
}
]
},
{
"id": "misinterpretation_biases",
"children": [
{
"id": "context_ignorance",
"remediation_advice": "Include contextual information in the analysis and interpretation phases to avoid misinterpreting data or trends.",
"references": [
"https://contextualscience.org/contextualism"
]
}
]
},
{
"id": "developer_biases",
"children": [
{
"id": "implicit_bias",
"remediation_advice": "Implement training programs to raise awareness and reduce the impact of unconscious biases among developers.",
"references": [
"https://implicit.harvard.edu/implicit/takeatest.html"
]
}
]
},
{
"id": "path_traversal",
"remediation_advice": "1. Prefer working without user input when using file system calls\n2. Use indexes rather than actual portions of file names when templating or using language files (i.e. value 5 from the user submission = Czechoslovakian, rather than expecting the user to return “Czechoslovakian”)\n3. Ensure the user cannot supply all parts of the path – surround it with your path code\n4. Validate the user’s input by only accepting known good – do not sanitize the data\n5. Use `chroot`ed jails and code access policies to restrict where the files can be obtained or saved to\n6. If forced to use user input for file operations, normalize the input before using in file I/O APIs, such as the [Java SE URI normalize() method](http://docs.oracle.com/javase/7/docs/api/java/net/URI.html#normalize).",
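The remediation advice added above for representation bias and aggregation bias comes down to two measurable checks: how each subgroup is represented in the dataset, and how a metric behaves per subgroup rather than only in aggregate. A minimal sketch of both checks follows, assuming a tabular dataset with hypothetical `group` and `outcome` columns (pandas; the column names are illustrative, not from this repository):

```python
# Minimal sketch of representation and per-group outcome checks.
# The 'group' and 'outcome' column names are illustrative assumptions.
import pandas as pd

def representation_report(df: pd.DataFrame, group_col: str = "group") -> pd.Series:
    """Share of each subgroup in the dataset (representation bias check)."""
    return df[group_col].value_counts(normalize=True).sort_index()

def per_group_rate(df: pd.DataFrame, group_col: str = "group",
                   outcome_col: str = "outcome") -> pd.Series:
    """Mean outcome per subgroup, so aggregation does not hide subgroup gaps."""
    return df.groupby(group_col)[outcome_col].mean().sort_index()

if __name__ == "__main__":
    df = pd.DataFrame({
        "group":   ["a", "a", "a", "b", "b", "c"],
        "outcome": [1, 0, 1, 0, 0, 1],
    })
    print(representation_report(df))  # group 'c' is only ~17% of rows
    print(per_group_rate(df))         # the overall rate hides that group 'b' is 0.0
    # A large gap between the max and min per-group rate is a simple
    # demographic-parity-style signal worth investigating further.
```

The same pattern extends to processing bias: compute the model's error rate per subgroup with `per_group_rate` on a residual column instead of the raw outcome.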
83 changes: 83 additions & 0 deletions vulnerability-rating-taxonomy.json
@@ -1806,6 +1806,89 @@
}
]
},
{
"id": "data_biases",
"name": "Data Biases",
"type": "category",
"children": [
{
"id": "representation_bias",
"name": "Representation Bias",
"type": "subcategory",
"priority": null
},
{
"id": "pre_existing_bias",
"name": "Pre-existing Bias",
"type": "subcategory",
"priority": null
}
]
},
{
"id": "algorithmic_biases",
"name": "Algorithmic Biases",
"type": "category",
"children": [
{
"id": "processing_bias",
"name": "Processing Bias",
"type": "subcategory",
"priority": null
},
{
"id": "aggregation_bias",
"name": "Aggregation Bias",
"type": "subcategory",
"priority": null
}
]
},
{
"id": "societal_biases",
"name": "Societal Biases",
"type": "category",
"children": [
{
"id": "confirmation_bias",
"name": "Confirmation Bias",
"type": "subcategory",
"priority": null
},
{
"id": "systemic_bias",
"name": "Systemic Bias",
"type": "subcategory",
"priority": null
}
]
},
{
"id": "misinterpretation_biases",
"name": "Misinterpretation Biases",
"type": "category",
"children": [
{
"id": "context_ignorance",
"name": "Context Ignorance",
"type": "subcategory",
"priority": null
}
]
},
{
"id": "developer_biases",
"name": "Developer Biases",
"type": "category",
"children": [
{
"id": "implicit_bias",
"name": "Implicit Bias",
"type": "subcategory",
"priority": null
}
]
},
{
"id": "physical_security_issues",
"name": "Physical Security Issues",
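The taxonomy entries added above are category nodes with `id`, `name`, `type`, and `children` fields. A minimal sketch of walking the file to locate one of the new entries is shown below; the top-level "content" key is an assumption about the file's overall shape, since only the per-entry fields are visible in this diff:

```python
# Minimal sketch: walk vulnerability-rating-taxonomy.json and locate an entry by id.
# The top-level "content" key is an assumption about the file's overall shape;
# only the per-entry fields (id, name, type, children) appear in this diff.
import json

def find_entry(nodes, target_id, path=()):
    """Depth-first search for an entry id, returning its name path if found."""
    for node in nodes:
        current = path + (node.get("name", node["id"]),)
        if node["id"] == target_id:
            return current
        found = find_entry(node.get("children", []), target_id, current)
        if found:
            return found
    return None

with open("vulnerability-rating-taxonomy.json") as fh:
    taxonomy = json.load(fh)

print(find_entry(taxonomy.get("content", []), "aggregation_bias"))
# -> e.g. ('Algorithmic Biases', 'Aggregation Bias') if the entry exists
```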
