diff --git a/.gitignore b/.gitignore index 1c9db1a6..8d910e88 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ -# Binaries -mattermost +# Binaries (root level only) +/mattermost *.exe # IDE diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-agents-tokens.json b/build/docker/grafana/dashboards/mattermost/mattermost-agents-tokens.json new file mode 100644 index 00000000..aa4b5391 --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-agents-tokens.json @@ -0,0 +1,1150 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Total tokens consumed across all bots, teams, and users", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "yellow", + "value": 1000000 + }, + { + "color": "red", + "value": 10000000 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "values": false, + "calcs": [ + "lastNotNull" + ], + "fields": "" + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}) + 
sum(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "Total Tokens Consumed", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Total input tokens consumed", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 6, + "y": 0 + }, + "id": 2, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "values": false, + "calcs": [ + "lastNotNull" + ], + "fields": "" + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "Total Input Tokens", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Total output tokens consumed", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "purple", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 0 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "values": false, + "calcs": [ + "lastNotNull" + ], + "fields": "" + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": 
"prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "Total Output Tokens", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Estimated cost based on typical LLM pricing (adjust calculation for your actual rates)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "yellow", + "value": 100 + }, + { + "color": "red", + "value": 500 + } + ] + }, + "unit": "currencyUSD" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "values": false, + "calcs": [ + "lastNotNull" + ], + "fields": "" + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "(sum(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}) * 0.003 / 1000) + (sum(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}) * 0.015 / 1000)", + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "Estimated Cost (Input: $0.003/1K, Output: $0.015/1K)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Token consumption over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 
10, + "gradientMode": "none", + "hideFrom": { + "tooltip": false, + "viz": false, + "legend": false + }, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Input Tokens" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "blue", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Output Tokens" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "purple", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 8 + }, + "id": 5, + "options": { + "legend": { + "calcs": [ + "last", + "mean" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(increase(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[$__interval])) or sum(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "Input Tokens", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(increase(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[$__interval])) or sum(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "Output Tokens", + "range": true, + "refId": "B" + } + ], + "title": "Token 
Consumption Over Time", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Token consumption by bot", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "tooltip": false, + "viz": false, + "legend": false + }, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 6, + "options": { + "legend": { + "calcs": [ + "last", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum by (bot_name) (increase(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[$__interval]) + increase(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[$__interval])) or sum by (bot_name) (agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} + agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "{{bot_name}}", + "range": true, + "refId": "A" + } + ], + "title": "Token Usage by Bot", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": 
"Prometheus" + }, + "description": "Token consumption by team", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "tooltip": false, + "viz": false, + "legend": false + }, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 18 + }, + "id": 7, + "options": { + "legend": { + "calcs": [ + "last", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum by (team_id) (increase(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[$__interval]) + increase(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[$__interval])) or sum by (team_id) (agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} + agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "{{team_id}}", + "range": true, + "refId": "A" + } + ], + "title": "Token Usage by Team", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Top bots by total token consumption", + "fieldConfig": { + "defaults": { + 
"color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Total Tokens" + }, + "properties": [ + { + "id": "custom.width", + "value": 150 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Estimated Cost" + }, + "properties": [ + { + "id": "unit", + "value": "currencyUSD" + }, + { + "id": "custom.width", + "value": 150 + } + ] + } + ] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 28 + }, + "id": 8, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "Total Tokens" + } + ] + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum by (bot_name) (agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} + agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "format": "table", + "instant": true, + "legendFormat": "__auto", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum by (bot_name) ((agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} * 0.003 / 1000) + (agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} * 0.015 / 1000))", + "format": "table", + "hide": false, + "instant": true, + "legendFormat": "__auto", + "refId": "B" + } + ], + "title": "Top Bots by Token Usage", + "transformations": [ + { + "id": "merge", + "options": {} + }, + { + "id": "organize", + "options": { + "excludeByName": { + "Time": true + }, + 
"indexByName": { + "Time": 0, + "Value #A": 2, + "Value #B": 3, + "bot_name": 1 + }, + "renameByName": { + "Value #A": "Total Tokens", + "Value #B": "Estimated Cost", + "bot_name": "Bot Name" + } + } + } + ], + "type": "table" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Top teams by total token consumption", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Total Tokens" + }, + "properties": [ + { + "id": "custom.width", + "value": 150 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Estimated Cost" + }, + "properties": [ + { + "id": "unit", + "value": "currencyUSD" + }, + { + "id": "custom.width", + "value": 150 + } + ] + } + ] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 28 + }, + "id": 9, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "Total Tokens" + } + ] + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum by (team_id) (agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} + agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "format": "table", + "instant": true, + "legendFormat": "__auto", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum by (team_id) ((agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} * 0.003 / 1000) + 
(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"} * 0.015 / 1000))", + "format": "table", + "hide": false, + "instant": true, + "legendFormat": "__auto", + "refId": "B" + } + ], + "title": "Top Teams by Token Usage", + "transformations": [ + { + "id": "merge", + "options": {} + }, + { + "id": "organize", + "options": { + "excludeByName": { + "Time": true + }, + "indexByName": { + "Time": 0, + "Value #A": 2, + "Value #B": 3, + "team_id": 1 + }, + "renameByName": { + "Value #A": "Total Tokens", + "Value #B": "Estimated Cost", + "team_id": "Team ID" + } + } + } + ], + "type": "table" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Breakdown of input vs output tokens as percentages", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "tooltip": false, + "viz": false, + "legend": false + } + }, + "mappings": [], + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 0, + "y": 46 + }, + "id": 11, + "options": { + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true, + "values": [ + "value", + "percent" + ] + }, + "pieType": "donut", + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "Input Tokens", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"})", + "legendFormat": "Output Tokens", + "range": true, + "refId": "B" + } + ], + "title": "Input vs Output Token Distribution", + "type": "piechart" + }, + { + "datasource": { + "type": 
"prometheus", + "uid": "Prometheus" + }, + "description": "Estimated daily cost trend", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 20, + "gradientMode": "opacity", + "hideFrom": { + "tooltip": false, + "viz": false, + "legend": false + }, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "currencyUSD" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 16, + "x": 8, + "y": 46 + }, + "id": 12, + "options": { + "legend": { + "calcs": [ + "mean", + "last" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(increase(agents_llm_input_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[1d]) * 0.003 / 1000 + increase(agents_llm_output_tokens_total{bot_name=~\"$bot_name\",team_id=~\"$team_id\"}[1d]) * 0.015 / 1000)", + "legendFormat": "Daily Cost", + "range": true, + "refId": "A" + } + ], + "title": "Estimated Daily Cost Trend", + "type": "timeseries" + } + ], + "refresh": "30s", + "schemaVersion": 38, + "style": "dark", + "tags": [ + "llm", + "ai", + "tokens", + "mattermost", + "agents" + ], + "templating": { + "list": [ + { + "allValue": ".*", + "current": { + "selected": false, + "text": "All", + "value": "$__all" + }, + "datasource": { + "type": 
"prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(agents_llm_input_tokens_total, bot_name)", + "hide": 0, + "includeAll": true, + "label": "Bot", + "multi": true, + "name": "bot_name", + "options": [], + "query": { + "query": "label_values(agents_llm_input_tokens_total, bot_name)", + "refId": "StandardVariableQuery" + }, + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 1, + "type": "query" + }, + { + "allValue": ".*", + "current": { + "selected": false, + "text": "All", + "value": "$__all" + }, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(agents_llm_input_tokens_total, team_id)", + "hide": 0, + "includeAll": true, + "label": "Team", + "multi": true, + "name": "team_id", + "options": [], + "query": { + "query": "label_values(agents_llm_input_tokens_total, team_id)", + "refId": "StandardVariableQuery" + }, + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 1, + "type": "query" + } + ] + }, + "time": { + "from": "now-24h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "Mattermost AI Agents - LLM Token Usage", + "uid": "mattermost-ai-tokens", + "version": 1, + "weekStart": "" +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-collapsed_reply_threads_performance.json b/build/docker/grafana/dashboards/mattermost/mattermost-collapsed_reply_threads_performance.json new file mode 100644 index 00000000..8d44d79d --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-collapsed_reply_threads_performance.json @@ -0,0 +1,456 @@ +{ + "__elements": [], + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "8.3.4" + }, + { + "type": "panel", + "id": "graph", + "name": "Graph (old)", + "version": "" + }, + { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- 
Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "iteration": 1646759397232, + "links": [], + "liveNow": false, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "description": "", + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "hiddenSeries": false, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "8.3.4", + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "histogram_quantile (\n 0.99,\n sum by (le,instance)(\n rate(mattermost_db_store_time_bucket{instance=~\"$server\",method=\"ThreadStore.GetThreadsForUser\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p99-{{instance}}", + "refId": "A" + }, + { + "expr": "histogram_quantile (\n 0.50,\n sum by (le,instance)(\n rate(mattermost_db_store_time_bucket{instance=~\"$server\",method=\"ThreadStore.GetThreadsForUser\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p50-{{instance}}", + "refId": "B" + } + ], + "thresholds": [], + "timeRegions": [], + "title": "GetThreadsForUser duration", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "show": true, + "values": [] + }, + "yaxes": [ + { + "$$hashKey": "object:98", + "format": "s", + "logBase": 1, + "show": true + }, + { + "$$hashKey": 
"object:99", + "format": "short", + "logBase": 1, + "show": true + } + ], + "yaxis": { + "align": false + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 0 + }, + "hiddenSeries": false, + "id": 4, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "8.3.4", + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(rate(mattermost_db_store_time_count{instance=~\"$server\",method=\"ThreadStore.GetThreadsForUser\"}[1m])) by (instance)", + "interval": "", + "legendFormat": "count-{{instance}}", + "refId": "A" + }, + { + "expr": "sum(rate(mattermost_db_store_time_count{instance=~\"$server\",method=\"ThreadStore.GetThreadsForUser\"}[1m]))", + "interval": "", + "legendFormat": "Total", + "refId": "B" + } + ], + "thresholds": [], + "timeRegions": [], + "title": "GetThreadsForUser Requests Per Second", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "show": true, + "values": [] + }, + "yaxes": [ + { + "$$hashKey": "object:396", + "format": "short", + "logBase": 1, + "show": true + }, + { + "$$hashKey": "object:397", + "format": "short", + "logBase": 1, + "show": true + } + ], + "yaxis": { + "align": false + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 9 + }, + "hiddenSeries": false, + "id": 6, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": 
true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "8.3.4", + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "exemplar": true, + "expr": "histogram_quantile (\n 0.99,\n sum by (le,instance)(\n rate(mattermost_db_store_time_bucket{instance=~\"$server\",method=\"ThreadStore.MarkAllAsReadByChannels\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p99-{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "exemplar": true, + "expr": "histogram_quantile (\n 0.50,\n sum by (le,instance)(\n rate(mattermost_db_store_time_bucket{instance=~\"$server\",method=\"ThreadStore.MarkAllAsReadByChannels\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p50-{{instance}}", + "refId": "B" + } + ], + "thresholds": [], + "timeRegions": [], + "title": "MarkAllAsReadByChannels duration", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "show": true, + "values": [] + }, + "yaxes": [ + { + "$$hashKey": "object:504", + "format": "s", + "logBase": 1, + "show": true + }, + { + "$$hashKey": "object:505", + "format": "short", + "logBase": 1, + "show": true + } + ], + "yaxis": { + "align": false + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 9 + }, + "hiddenSeries": false, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { 
+ "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "8.3.4", + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "exemplar": true, + "expr": "sum(rate(mattermost_db_store_time_count{instance=~\"$server\",method=\"ThreadStore.MarkAllAsReadByChannels\"}[1m])) by (instance)", + "interval": "", + "legendFormat": "count-{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "exemplar": true, + "expr": "sum(rate(mattermost_db_store_time_count{instance=~\"$server\",method=\"ThreadStore.MarkAllAsReadByChannels\"}[1m]))", + "interval": "", + "legendFormat": "Total", + "refId": "B" + } + ], + "thresholds": [], + "timeRegions": [], + "title": "MarkAllAsReadByChannels Requests Per Second", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "show": true, + "values": [] + }, + "yaxes": [ + { + "$$hashKey": "object:714", + "format": "short", + "logBase": 1, + "show": true + }, + { + "$$hashKey": "object:715", + "format": "short", + "logBase": 1, + "show": true + } + ], + "yaxis": { + "align": false + } + } + ], + "refresh": "10s", + "schemaVersion": 34, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(instance)", + "hide": 0, + "includeAll": true, + "label": "server", + "multi": true, + "name": "server", + "options": [], + "query": { + "query": "label_values(instance)", + "refId": "Prometheus-server-Variable-Query" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-3h", + 
"to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "Collapsed Reply Threads Performance", + "uid": "cZY9yFJ7z", + "version": 5, + "weekStart": "" +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-desktop-app-metrics.json b/build/docker/grafana/dashboards/mattermost/mattermost-desktop-app-metrics.json new file mode 100644 index 00000000..eaebbad9 --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-desktop-app-metrics.json @@ -0,0 +1,412 @@ +{ + "__elements": {}, + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "10.4.2" + }, + { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "timeseries", + "name": "Time series", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The average amount of a machine's CPU used by the given Desktop App process over the measuring interval (usually 1 minute). 
", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_desktopapp_cpu_usage_bucket{processName=~\"$processName\",version=~\"$version\",platform=~\"$platform\"}[$decay])))", + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A" + } + ], + "title": "CPU Usage (%)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The number of megabytes used by a Desktop App process at the time of measurement.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic-by-name" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + 
"axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 10 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_desktopapp_memory_usage_bucket{processName=~\"$processName\",version=~\"$version\",platform=~\"$platform\"}[$decay])))", + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A" + } + ], + "title": "Memory Usage (MBs)", + "type": "timeseries" + } + ], + "refresh": "1m", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(mattermost_desktopapp_cpu_usage_bucket,processName)", + "hide": 0, + "includeAll": true, + "label": "Process", + "multi": true, + "name": "processName", + "options": [], + "query": { + "qryType": 1, + "query": "label_values(mattermost_desktopapp_cpu_usage_bucket,processName)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + 
"skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(mattermost_desktopapp_cpu_usage_bucket,version)", + "hide": 0, + "includeAll": true, + "label": "App Version", + "multi": true, + "name": "version", + "options": [], + "query": { + "qryType": 1, + "query": "label_values(mattermost_desktopapp_cpu_usage_bucket,version)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(mattermost_desktopapp_cpu_usage_bucket,platform)", + "hide": 0, + "includeAll": true, + "multi": true, + "name": "platform", + "options": [], + "query": { + "qryType": 1, + "query": "label_values(mattermost_desktopapp_cpu_usage_bucket,platform)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": true, + "text": "50", + "value": "50" + }, + "hide": 0, + "includeAll": false, + "label": "Percentile", + "multi": false, + "name": "percentile", + "options": [ + { + "selected": true, + "text": "50", + "value": "50" + }, + { + "selected": false, + "text": "75", + "value": "75" + }, + { + "selected": false, + "text": "90", + "value": "90" + }, + { + "selected": false, + "text": "99", + "value": "99" + } + ], + "query": "50,75,90,99", + "queryValue": "", + "skipUrlSync": false, + "type": "custom" + }, + { + "current": { + "selected": false, + "text": "30m", + "value": "30m" + }, + "hide": 0, + "includeAll": false, + "label": "Decay Time", + "multi": false, + "name": "decay", + "options": [ + { + "selected": true, + "text": "30m", + "value": "30m" + }, + { + "selected": false, + "text": "1h", + "value": "1h" + }, + { + "selected": 
false, + "text": "3h", + "value": "3h" + }, + { + "selected": false, + "text": "6h", + "value": "6h" + }, + { + "selected": false, + "text": "12h", + "value": "12h" + }, + { + "selected": false, + "text": "1d", + "value": "1d" + } + ], + "query": "30m,1h,3h,6h,12h,1d", + "queryValue": "", + "skipUrlSync": false, + "type": "custom" + } + ] + }, + "time": { + "from": "now-24h", + "to": "now" + }, + "timepicker": {}, + "timezone": "browser", + "title": "Desktop App Metrics", + "uid": "fe12lkd7062v4a", + "version": 4, + "weekStart": "" +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-mobile-performance.json b/build/docker/grafana/dashboards/mattermost/mattermost-mobile-performance.json new file mode 100644 index 00000000..1b72d059 --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-mobile-performance.json @@ -0,0 +1,2073 @@ +{ + "__inputs": [ + { + "name": "VAR_RATE", + "type": "constant", + "label": "rate", + "value": "1h", + "description": "" + } + ], + "__elements": {}, + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "11.1.0" + }, + { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "stat", + "name": "Stat", + "version": "" + }, + { + "type": "panel", + "id": "timeseries", + "name": "Time series", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 13, + "panels": [], + "title": "Load", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": 
"Prometheus" + }, + "description": "Load times between the moment the app is initialized and the moment the target screen (Home, Channel or Thread) is shown in the screen.\n\nEach point in the graph shows the percentile of the events during the last hour.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": 
"histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_load_bucket{}[$rate])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_load_bucket{platform=\"android\"}[$rate])))", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_load_bucket{platform=\"ios\"}[$rate])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "Load", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Load times between the moment the app is initialized and the moment the target screen (Home, Channel or Thread) is shown in the screen.\n\nThese values represent the percentile of all events since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 4 + }, + { + "color": "red", + "value": 10 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": 
{ + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 1 + }, + "id": 7, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_load_bucket{}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_load_bucket{platform=\"android\"}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_load_bucket{platform=\"ios\"}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": 
false + } + ], + "title": "Load lifetime percentiles", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Load times between the moment the app is initialized and the moment the target screen (Home, Channel or Thread) is shown in the screen.\n\nThis graph represents all events generated since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 9 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + 
"datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_load_count{})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_load_count{platform=\"android\"})", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_load_count{platform=\"ios\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "# Events load", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Load times between the moment the app is initialized and the moment the target screen (Home, Channel or Thread) is shown in the screen.\n\nThese numbers represent the number of events since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" 
+ } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 9 + }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(mattermost_mobileapp_mobile_load_count{})", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(mattermost_mobileapp_mobile_load_count{platform=\"android\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(mattermost_mobileapp_mobile_load_count{platform=\"ios\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "# Events Load", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 17 + }, + "id": 14, + "panels": [], + "title": "Channel Switch", + "type": "row" + }, + { + "datasource": { + "type": 
"prometheus", + "uid": "Prometheus" + }, + "description": "Channel switch represents the time between the user clicks on a channel name, and the app shows the latest post in that channel.\n\nEach point in the graph shows the percentile of the events during the last hour.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + 
"expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_channel_switch_bucket{}[$rate])))", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_channel_switch_bucket{platform=\"android\"}[$rate])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_channel_switch_bucket{platform=\"ios\"}[$rate])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "Channel Switch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Channel switch represents the time between the user clicks on a channel name, and the app shows the latest post in that channel.\n\nThese values represent the percentile of all events since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 0.7 + }, + { + "color": "red", + "value": 1.5 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + 
"value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 18 + }, + "id": 8, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_channel_switch_bucket{}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_channel_switch_bucket{platform=\"android\"}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_channel_switch_bucket{platform=\"ios\"}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + 
"legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "Channel switch lifetime percentiles", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Channel switch represents the time between the user clicks on a channel name, and the app shows the latest post in that channel.\n\nThis graph represents all events generated since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 26 + }, + "id": 11, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": 
"single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_channel_switch_count{})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_channel_switch_count{platform=\"android\"})", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_channel_switch_count{platform=\"ios\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "# Events Channel Switch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Channel switch represents the time between the user clicks on a channel name, and the app shows the latest post in that channel.\n\nThese numbers represent the number of events since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": 
"Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 26 + }, + "id": 16, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_channel_switch_count{})", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_channel_switch_count{platform=\"android\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_channel_switch_count{platform=\"ios\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "# Events Channel Switch", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 34 
+ }, + "id": 15, + "panels": [], + "title": "Team switch", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Team switch represents the time between the user taps on a team, and the full category screen changes to the one of the new team.\n\nEach point in the graph shows the percentile of the events during the last hour.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 35 + }, + "id": 3, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": 
"prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_team_switch_bucket{}[$rate])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_team_switch_bucket{platform=\"android\"}[$rate])))", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (rate(mattermost_mobileapp_mobile_team_switch_bucket{platform=\"ios\"}[$rate])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "Mobile Team switch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Team switch represents the time between the user taps on a team, and the full category screen changes to the one of the new team.\n\nThese values represent the percentile of all events since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "#EAB839", + "value": 0.5 + }, + { + "color": "red", + "value": 1 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + 
"options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 35 + }, + "id": 9, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_team_switch_bucket{}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_team_switch_bucket{platform=\"android\"}))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile, sum by(le) (mattermost_mobileapp_mobile_team_switch_bucket{platform=\"ios\"}))", + "fullMetaSearch": false, + "hide": 
false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "Team Switch lifetime percentiles", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Team switch represents the time between the user taps on a team, and the full category screen changes to the one of the new team.\n\nThis graph represents all events generated since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 43 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": 
true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_team_switch_count{})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_team_switch_count{platform=\"android\"})", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_team_switch_count{platform=\"ios\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "# Events Team Switch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Team switch represents the time between the user taps on a team, and the full category screen changes to the one of the new team.\n\nThese numbers represent the number of events since the last metrics restart.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "Total" + }, + "properties": [ + { + "id": "displayName", + "value": "All" + } + ] + }, + { + "matcher": { 
+ "id": "byFrameRefID", + "options": "Android" + }, + "properties": [ + { + "id": "displayName", + "value": "Android" + } + ] + }, + { + "matcher": { + "id": "byFrameRefID", + "options": "iOS" + }, + "properties": [ + { + "id": "displayName", + "value": "iOS" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 43 + }, + "id": 17, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_team_switch_count{})", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Total", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_team_switch_count{platform=\"android\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "Android", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(mattermost_mobileapp_mobile_team_switch_count{platform=\"ios\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "iOS", + "useBackend": false + } + ], + "title": "# Events Team Switch", + "type": "stat" + } + ], + "refresh": "", + "schemaVersion": 39, + "tags": [], 
+ "templating": { + "list": [ + { + "description": "rate for measurements", + "hide": 2, + "name": "rate", + "query": "${VAR_RATE}", + "skipUrlSync": false, + "type": "constant", + "current": { + "value": "${VAR_RATE}", + "text": "${VAR_RATE}", + "selected": false + }, + "options": [ + { + "value": "${VAR_RATE}", + "text": "${VAR_RATE}", + "selected": false + } + ] + }, + { + "current": { + "selected": false, + "text": "0.95", + "value": "0.95" + }, + "hide": 0, + "includeAll": false, + "multi": false, + "name": "percentile", + "options": [ + { + "selected": false, + "text": "0.99", + "value": "0.99" + }, + { + "selected": true, + "text": "0.95", + "value": "0.95" + }, + { + "selected": false, + "text": "0.90", + "value": "0.90" + }, + { + "selected": false, + "text": "0.75", + "value": "0.75" + } + ], + "query": "0.99, 0.95, 0.90, 0.75", + "queryValue": "", + "skipUrlSync": false, + "type": "custom" + } + ] + }, + "time": { + "from": "now-24h", + "to": "now" + }, + "timepicker": {}, + "timezone": "browser", + "title": "Mobile Performance Metrics", + "uid": "ddp4txrlmyg3kc", + "version": 26, + "weekStart": "" +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-performance-kpi-metrics_rev2.json b/build/docker/grafana/dashboards/mattermost/mattermost-performance-kpi-metrics_rev2.json new file mode 100644 index 00000000..6efd3f0c --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-performance-kpi-metrics_rev2.json @@ -0,0 +1,1460 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": 2539, + "graphTooltip": 0, + "id": null, + "iteration": 1532610781587, + "links": [], + "panels": [ + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 
0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "Prometheus", + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 0, + "y": 0 + }, + "id": 9, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": " %", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "max(irate(mattermost_process_cpu_seconds_total{instance=~\"$server\"}[1m])* 100)", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "", + "metric": "mattermost_process_cpu_seconds_total", + "refId": "A", + "step": 5 + } + ], + "thresholds": "50,80", + "title": "Max CPU utilization rate (%)", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": "Prometheus", + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 4, + "y": 0 + }, + "id": 21, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + 
"value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "sum(rate(mattermost_post_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "1m", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A", + "step": 60 + } + ], + "thresholds": "", + "title": "Number of messages / min", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": "Prometheus", + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 8, + "y": 0 + }, + "id": 20, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": 
"sum(rate(mattermost_http_errors_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "1m", + "intervalFactor": 1, + "refId": "A", + "step": 60 + } + ], + "thresholds": "", + "title": "Number of API errors / min", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "Prometheus", + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 1000, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 12, + "y": 0 + }, + "id": 13, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "sum(mattermost_http_websockets_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "refId": "A", + "step": 5 + } + ], + "thresholds": "500, 800", + "title": "Number of connected devices", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + 
"rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "Prometheus", + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 16, + "y": 0 + }, + "id": 17, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "sum(mattermost_db_master_connections_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "refId": "A", + "step": 5 + } + ], + "thresholds": "50, 80", + "title": "Number of master DB conns", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "Prometheus", + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 1000, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 20, + "y": 0 + }, + "id": 12, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], 
+ "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": " MB", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "sum(go_memstats_alloc_bytes{instance=~\"$server\"})/1024/1024", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "refId": "A", + "step": 5 + } + ], + "thresholds": "500, 800", + "title": "Server memory usage (in MB)", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 4 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_post_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_post_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_post_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_post_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + 
"timeFrom": null, + "timeShift": null, + "title": "Number of Messages per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 5000 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "C", + "1m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "60s", + "handler": 1, + "name": "Number of API Errors per Second alert", + "noDataState": "no_data", + "notifications": [] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 4 + }, + "id": 5, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_http_errors_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_http_errors_total", + "refId": "A", + "step": 5 + }, + { + "expr": 
"sum(irate(mattermost_http_errors_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_http_errors_total", + "refId": "B", + "step": 5 + }, + { + "expr": "irate(mattermost_http_errors_total[1m])", + "format": "time_series", + "hide": true, + "intervalFactor": 1, + "refId": "C" + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 5000 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Number of API Errors per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 1000 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "B", + "1m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "60s", + "handler": 1, + "name": "Mean API Request Time (in milliseconds) alert", + "noDataState": "no_data", + "notifications": [] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 11 + }, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + 
"links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "1000.0 * rate(mattermost_http_request_duration_seconds_sum{instance=~\"$server\"}[1m]) / rate(mattermost_http_request_duration_seconds_count{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_http_request_duration_seconds_sum", + "refId": "A", + "step": 5 + }, + { + "expr": "1000.0 * rate(mattermost_http_request_duration_seconds_sum[1m]) / rate(mattermost_http_request_duration_seconds_count[1m])", + "format": "time_series", + "hide": true, + "intervalFactor": 1, + "refId": "B" + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 1000 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Mean API Request Time (in milliseconds)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Milliseconds (ms)", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 11 + }, + "id": 19, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + 
"linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_db_master_connections_total{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_db_master_connections_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_db_master_connections_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_db_master_connections_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Number of Connections to Master Database", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 1, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + 
"spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "1000.0 * rate(mattermost_cluster_cluster_request_duration_seconds_sum{instance=~\"$server\"}[1m]) / rate(mattermost_cluster_cluster_request_duration_seconds_count{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_cluster_cluster_request_duration_seconds_sum", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Mean Cluster Request Time (in milliseconds)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Milliseconds (ms)", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 18 + }, + "id": 23, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_db_read_replica_connections_total{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{job}}", + "metric": 
"mattermost_db_read_replica_connections_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_db_read_replica_connections_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_db_read_replica_connections_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Number of Connections to Read Replica Databases", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 25 + }, + "id": 22, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_process_cpu_seconds_total{instance=~\"$server\"}[1m])* 100", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_process_cpu_seconds_total", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": 
null, + "title": "CPU Utilization Rate (%)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Utilization Rate (%)", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 25 + }, + "id": 18, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "go_goroutines{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "go_goroutines", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(go_goroutines{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "go_goroutines", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Number of Go Routines", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + 
"yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": "30s", + "schemaVersion": 16, + "style": "dark", + "tags": [ + "mattermost", + "mattermost-perf" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": {}, + "datasource": "Prometheus", + "hide": 0, + "includeAll": false, + "label": "server", + "multi": true, + "name": "server", + "options": [], + "query": "label_values(instance)", + "refresh": 1, + "regex": "", + "sort": 0, + "tagValuesQuery": null, + "tags": [], + "tagsQuery": null, + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Mattermost Performance KPI Metrics", + "uid": "000000010", + "version": 41 +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring-bonus-metrics_rev2.json b/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring-bonus-metrics_rev2.json new file mode 100644 index 00000000..27fd1032 --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring-bonus-metrics_rev2.json @@ -0,0 +1,2387 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": 2545, + "graphTooltip": 0, + "iteration": 1610119732885, + "links": [], + "panels": [ + { + "aliasColors": {}, + 
"bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 0 + }, + "hiddenSeries": false, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_login_logins_total{instance=~\"$server\"}/(mattermost_login_logins_total{instance=~\"$server\"}+mattermost_login_logins_fail_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_login_logins_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_login_logins_total{instance=~\"$server\"})/(sum(mattermost_login_logins_total{instance=~\"$server\"})+sum(mattermost_login_logins_fail_total{instance=~\"$server\"}))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "All", + "metric": "mattermost_login_logins_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Successful Logins / Number of Login Attempts", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "label": "Login Success 
Rate", + "logBase": 1, + "max": "1", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 9 + }, + "hiddenSeries": false, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_login_logins_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "1s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_login_logins_total", + "refId": "A", + "step": 2 + }, + { + "expr": "sum(irate(mattermost_login_logins_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "1s", + "intervalFactor": 1, + "legendFormat": "Total", + "refId": "B", + "step": 2 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Successful Logins per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, 
+ "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 18 + }, + "hiddenSeries": false, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_post_emails_sent_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_post_broadcasts_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_post_emails_sent_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_post_broadcasts_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Emails Sent per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + 
"label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 27 + }, + "hiddenSeries": false, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_post_pushes_sent_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_post_pushes_sent_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Mobile Push Notifications Sent per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": 
null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 36 + }, + "hiddenSeries": false, + "id": 11, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_post_file_attachments_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_post_file_attachments_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_post_file_attachments_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_post_file_attachments_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of File Attachments per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + 
"format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 45 + }, + "hiddenSeries": false, + "id": 23, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_search_posts_searches_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "refId": "A" + }, + { + "expr": "sum(irate(mattermost_search_posts_searches_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Search Queries for Posts per Second", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": 
"short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 53 + }, + "hiddenSeries": false, + "id": 21, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null as zero", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "1000.0 * rate(mattermost_search_posts_searches_duration_seconds_sum{instance=~\"$server\"}[1m]) / rate(mattermost_search_posts_searches_duration_seconds_count{instance=~\"$server\"}[1m])", + "format": "time_series", + "instant": false, + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Mean Search Query Time for Posts (in milliseconds)", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Milliseconds (ms)", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + 
"bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 12, + "w": 24, + "x": 0, + "y": 61 + }, + "hiddenSeries": false, + "id": 12, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": true, + "min": false, + "show": true, + "sort": "max", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_websocket_event_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{type}}", + "metric": "mattermost_websocket_event_total", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Websocket Events per Second", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} 
+ }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 12, + "w": 24, + "x": 0, + "y": 73 + }, + "hiddenSeries": false, + "id": 13, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": true, + "min": false, + "show": true, + "sort": "max", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_websocket_broadcasts_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{name}}", + "metric": "mattermost_websocket_broadcasts_total", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Websocket Broadcasts per Second", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 85 + }, + "hiddenSeries": false, + "id": 
17, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_http_requests_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_http_requests_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_http_requests_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_http_requests_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of API Requests per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 94 + }, + 
"hiddenSeries": false, + "id": 18, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_cluster_cluster_requests_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_cluster_cluster_requests_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_cluster_cluster_requests_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_cluster_cluster_requests_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Cluster Requests per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + 
"fillGradient": 0, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 104 + }, + "hiddenSeries": false, + "id": 19, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": true, + "min": false, + "show": true, + "sort": "max", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_cluster_cluster_event_type_totals{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{name}}", + "metric": "mattermost_websocket_broadcasts_total", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Cluster Events per Second", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 114 + }, + "hiddenSeries": false, + "id": 5, + "legend": { + "avg": false, + 
"current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "go_gc_duration_seconds{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}} {{quantile}}", + "metric": "go_gc_duration_seconds", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Garbage Collection Duration by Quantile (in seconds)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Seconds", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 123 + }, + "hiddenSeries": false, + "id": 6, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + 
}, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "go_memstats_heap_objects{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "go_memstats_heap_objects", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(go_memstats_heap_objects{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "go_memstats_heap_objects", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Objects on the Heap", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 132 + }, + "hiddenSeries": false, + "id": 1, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": true, + "min": false, + "show": true, + "sort": "max", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + 
"options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": true, + "targets": [ + { + "expr": "mattermost_cache_etag_hit_total{instance=~\"$server\"}/(mattermost_cache_etag_miss_total{instance=~\"$server\"} + mattermost_cache_etag_hit_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "30s", + "intervalFactor": 1, + "legendFormat": "{{route}}", + "metric": "mattermost_cache_etag_hit_total", + "refId": "A", + "step": 30 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Etag Hit Rate", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "label": "Hit Rate", + "logBase": 1, + "max": "1", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 141 + }, + "hiddenSeries": false, + "id": 2, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": true, + "min": false, + "show": true, + "sort": "max", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": 
"7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_cache_mem_hit_total{instance=~\"$server\"}/(mattermost_cache_mem_miss_total{instance=~\"$server\"} + mattermost_cache_mem_hit_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "30s", + "intervalFactor": 1, + "legendFormat": "{{name}}", + "metric": "mattermost_cache_mem_hit_total", + "refId": "A", + "step": 30 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Memory Cache Hit Rate", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "label": "Hit Rate", + "logBase": 1, + "max": "1", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 150 + }, + "hiddenSeries": false, + "id": 3, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": false, + "min": false, + "show": true, + "sort": "avg", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + 
"seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_cache_etag_hit_total{instance=~\"$server\"}/(mattermost_cache_etag_miss_total{instance=~\"$server\"} + mattermost_cache_etag_hit_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "30s", + "intervalFactor": 1, + "legendFormat": "{{route}}", + "metric": "mattermost_cache_etag_hit_total", + "refId": "A", + "step": 30 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Mean Etag Hit Rate (for each server)", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": [ + "avg" + ] + }, + "yaxes": [ + { + "format": "percentunit", + "label": "Hit Rate", + "logBase": 1, + "max": "1", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 12, + "w": 24, + "x": 0, + "y": 159 + }, + "hiddenSeries": false, + "id": 4, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": false, + "min": false, + "show": true, + "sort": "avg", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": false, + "linewidth": 1, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + 
"stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_cache_mem_hit_total{instance=~\"$server\"}/(mattermost_cache_mem_miss_total{instance=~\"$server\"} + mattermost_cache_mem_hit_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "30s", + "intervalFactor": 1, + "legendFormat": "{{name}}", + "metric": "mattermost_cache_mem_hit_total", + "refId": "A", + "step": 30 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Mean Memory Cache Hit Rate (for each server)", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "series", + "name": null, + "show": true, + "values": [ + "avg" + ] + }, + "yaxes": [ + { + "format": "percentunit", + "label": "Hit Rate", + "logBase": 1, + "max": "1", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 171 + }, + "hiddenSeries": false, + "id": 14, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": 
"mattermost_process_resident_memory_bytes{instance=~\"$server\"}/1024/1024", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_process_resident_memory_bytes", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_process_resident_memory_bytes{instance=~\"$server\"}/1024/1024)", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_process_resident_memory_bytes", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Resident Memory Size (in MB)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "MB", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 180 + }, + "hiddenSeries": false, + "id": 15, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": 
false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_process_virtual_memory_bytes{instance=~\"$server\"}/1024/1024", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_process_virtual_memory_bytes", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_process_virtual_memory_bytes{instance=~\"$server\"}/1024/1024)", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_process_virtual_memory_bytes", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Virtual Memory Size (in MB)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "MB", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 189 + }, + "hiddenSeries": false, + "id": 16, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + 
"seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_process_open_fds{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_process_open_fds", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_process_open_fds{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_process_open_fds", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Open File Descriptors", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": "30s", + "schemaVersion": 26, + "style": "dark", + "tags": [ + "mattermost", + "mattermost-perf" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "selected": false, + "text": "host.docker.internal:8067", + "value": "host.docker.internal:8067" + }, + "datasource": "Prometheus", + "definition": "", + "error": null, + "hide": 0, + "includeAll": false, + "label": "server", + "multi": true, + "name": "server", + "options": [], + "query": "label_values(instance)", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": null, + "tags": [], + "tagsQuery": null, + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + 
"timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Mattermost Performance Monitoring (Bonus Metrics)", + "uid": "000000013", + "version": 1 +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring-v2.json b/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring-v2.json new file mode 100644 index 00000000..1466a8d2 --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring-v2.json @@ -0,0 +1,3055 @@ +{ + "__elements": {}, + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "10.2.3" + }, + { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "timeseries", + "name": "Time series", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "A dashboard to monitor complete Mattermost application performance", + "editable": true, + "fiscalYearStartMonth": 0, + "gnetId": 15582, + "graphTooltip": 1, + "id": null, + "links": [], + "liveNow": false, + "panels": [ + { + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 53, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "refId": "A" + } + ], + "title": "Websockets", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + 
"color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 54, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(mattermost_websocket_broadcast_buffer_size)", + "legendFormat": "Buffer size", + "range": true, + "refId": "A" + } + ], + "title": "Websocket Buffer Size", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 
5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 1 + }, + "id": 55, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "exemplar": true, + "expr": "sum(rate(mattermost_websocket_broadcasts_total[5m])) by (name)", + "interval": "", + "legendFormat": "{{name}}", + "range": true, + "refId": "A" + } + ], + "title": "Websocket Broadcasts", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + 
"overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 9 + }, + "id": 56, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(increase(mattermost_websocket_reconnects_total[10m])) by (instance)", + "legendFormat": "{{instance}}", + "range": true, + "refId": "A" + } + ], + "title": "Websocket Reconnects", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 9 + }, + "id": 57, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + 
"expr": "sum(rate(mattermost_websocket_event_total[5m])) by (type)", + "legendFormat": "{{type}}", + "range": true, + "refId": "A" + } + ], + "title": "Websocket Events", + "type": "timeseries" + }, + { + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 17 + }, + "id": 6, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "refId": "A" + } + ], + "title": "Application Metrics", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "rate(mattermost_http_requests_total{instance=~\"$server\"}[1m])", + "interval": "", + "legendFormat": 
"{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(rate(mattermost_http_requests_total{instance=~\"$server\"}[1m]))", + "interval": "", + "legendFormat": "Total", + "refId": "B" + } + ], + "title": "HTTP Requests per second", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 18 + }, + "id": 20, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(rate(mattermost_db_store_time_count{instance=~\"$server\"}[5m])) by (instance)", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(rate(mattermost_db_store_time_count{instance=~\"$server\"}[5m]))", + "interval": 
"", + "legendFormat": "Total", + "refId": "B" + } + ], + "title": "DB Calls per second", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "log": 2, + "type": "log" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 26 + }, + "id": 16, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile(0.99, sum(rate(mattermost_api_time_bucket{instance=~\"$server\"}[1m])) by (instance,le))", + "interval": "", + "legendFormat": "p99-{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile(0.50, sum(rate(mattermost_api_time_bucket{instance=~\"$server\"}[1m])) by (instance,le))", + "interval": "", + "legendFormat": "p50-{{instance}}", + "refId": "B" + } + ], + "title": "API Latency", + "type": "timeseries" + }, + { + 
"datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "log": 2, + "type": "log" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 26 + }, + "id": 18, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile(0.99, sum(rate(mattermost_db_store_time_bucket{instance=~\"$server\"}[1m])) by (instance,le))", + "interval": "", + "legendFormat": "p99-{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile(0.50, sum(rate(mattermost_db_store_time_bucket{instance=~\"$server\"}[1m])) by (instance,le))", + "interval": "", + "legendFormat": "p50-{{instance}}", + "refId": "B" + } + ], + "title": "Store latency", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + 
"mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 34 + }, + "id": 28, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(increase(mattermost_db_store_time_count{instance=~\"$server\",method=~\"$top_db_count\"}[5m])) by (method)", + "interval": "", + "legendFormat": "{{method}}", + "refId": "A" + } + ], + "title": "Top 10 DB Calls by Count", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + 
"lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 34 + }, + "id": 30, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(increase(mattermost_api_time_count{instance=~\"$server\",handler=~\"$top_api_count\"}[5m])) by (handler)", + "interval": "", + "legendFormat": "{{handler}}", + "refId": "A" + } + ], + "title": "Top 10 API Requests by Count", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + 
}, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 42 + }, + "id": 32, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "min" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(increase(mattermost_db_store_time_sum{instance=~\"$server\",method=~\"$top_db_latency\"}[5m])) by (method) / sum(increase(mattermost_db_store_time_count{instance=~\"$server\",method=~\"$top_db_latency\"}[5m])) by (method)", + "interval": "", + "legendFormat": "{{method}}", + "refId": "A" + } + ], + "title": "Top 10 DB calls by duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 42 + }, + "id": 34, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "min" + ], + "displayMode": "table", + "placement": "right", + 
"showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(increase(mattermost_api_time_sum{instance=~\"$server\",handler=~\"$top_api_latency\"}[5m])) by (handler) / sum(increase(mattermost_api_time_count{instance=~\"$server\",handler=~\"$top_api_latency\"}[5m])) by (handler)", + "interval": "", + "legendFormat": "{{handler}}", + "refId": "A" + } + ], + "title": "Top 10 API requests by duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "log": 2, + "type": "log" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 50 + }, + "id": 24, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile (\n 0.99,\n sum by (le, instance)(\n 
rate(mattermost_api_time_bucket{instance=~\"$server\",handler=\"getPostsForChannelAroundLastUnread\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p99-{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile (\n 0.50,\n sum by (le, instance)(\n rate(mattermost_api_time_bucket{instance=~\"$server\",handler=\"getPostsForChannelAroundLastUnread\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p50-{{instance}}", + "refId": "B" + } + ], + "title": "Channel Load Duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "log": 2, + "type": "log" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 50 + }, + "id": 26, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile (\n 0.99,\n sum by (le, instance)(\n 
rate(mattermost_api_time_bucket{instance=~\"$server\",handler=\"createPost\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p99-{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile (\n 0.50,\n sum by (le, instance)(\n rate(mattermost_api_time_bucket{instance=~\"$server\",handler=\"createPost\"}[5m])\n )\n)", + "interval": "", + "legendFormat": "p50-{{instance}}", + "refId": "B" + } + ], + "title": "CreatePost duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 58 + }, + "id": 14, + "options": { + "legend": { + "calcs": [ + "lastNotNull" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "mattermost_http_websockets_total{instance=~\"$server\"}", + 
"interval": "", + "legendFormat": "{{instance}} - {{ origin_client}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(mattermost_http_websockets_total{instance=~\"$server\"})", + "interval": "", + "legendFormat": "Total", + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(mattermost_http_websockets_total{instance=~\"$server\"}) by (origin_client)", + "hide": false, + "interval": "", + "legendFormat": "Total - {{ origin_client }}", + "range": true, + "refId": "C" + } + ], + "title": "Number of connected devices (WebSocket Connections)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 58 + }, + "id": 22, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + 
"datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(mattermost_db_master_connections_total)", + "interval": "", + "legendFormat": "master", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(mattermost_db_read_replica_connections_total)", + "interval": "", + "legendFormat": "replica", + "refId": "B" + } + ], + "title": "DB Connections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 66 + }, + "id": 52, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(go_sql_open_connections{db_name=~\"replica.*\"})", + "hide": false, + "legendFormat": "open_conns", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": 
"prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(go_sql_in_use_connections{db_name=~\"replica.*\"})", + "hide": false, + "legendFormat": "in_use_conns", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(rate(go_sql_wait_count_total{db_name=~\"replica.*\"}[5m]))", + "hide": false, + "legendFormat": "wait_count", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(rate(go_sql_max_idle_closed_total{db_name=~\"replica.*\",instance=~\"$server\"}[5m]))", + "hide": false, + "legendFormat": "max_idle_closed", + "range": true, + "refId": "D" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(rate(go_sql_max_idle_time_closed_total{db_name=~\"replica.*\",instance=~\"$server\"}[5m]))", + "hide": false, + "legendFormat": "max_idle_time_closed", + "range": true, + "refId": "E" + } + ], + "title": "Connection Pool (Replica)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + 
"value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 66 + }, + "id": 50, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(go_sql_open_connections{db_name=\"master\"})", + "hide": false, + "legendFormat": "open_conns", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(go_sql_in_use_connections{db_name=\"master\"})", + "hide": false, + "legendFormat": "in_use_conns", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(rate(go_sql_wait_count_total{db_name=\"master\"}[5m]))", + "hide": false, + "legendFormat": "wait_count", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(rate(go_sql_max_idle_closed_total{db_name=\"master\"}[5m]))", + "hide": false, + "legendFormat": "max_idle_closed", + "range": true, + "refId": "D" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(rate(go_sql_max_idle_time_closed_total{db_name=\"master\",instance=~\"$server\"}[5m]))", + "hide": false, + "legendFormat": "max_idle_time_closed", + "range": true, + "refId": "E" + } + ], + "title": "Connection Pool (Master)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "This needs to be configured in config.json for it to work", + "fieldConfig": { + "defaults": { + "color": { + 
"mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 75 + }, + "id": 44, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "mattermost_db_replica_lag_time{instance=~\"$server\"}", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + } + ], + "title": "Replica Lag", + "type": "timeseries" + }, + { + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 83 + }, + "id": 36, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "refId": "A" + } + ], + "title": "Cluster Metrics", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Lower numbers are better, and zero means \"totally healthy\".", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + 
"custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 84 + }, + "id": 38, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "mattermost_cluster_cluster_health_score{instance=~\"$server\"}", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + } + ], + "title": "Cluster Health", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": 
"linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 84 + }, + "id": 40, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile(0.99, sum(rate(mattermost_cluster_cluster_request_duration_seconds_bucket{instance=~\"$server\"}[5m])) by (le,instance))", + "interval": "", + "legendFormat": "p99-{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "histogram_quantile(0.50, sum(rate(mattermost_cluster_cluster_request_duration_seconds_bucket{instance=~\"$server\"}[5m])) by (le,instance))", + "interval": "", + "legendFormat": "p50-{{instance}}", + "refId": "B" + } + ], + "title": "Cluster Request Duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + 
"stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 92 + }, + "id": 42, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(rate(mattermost_cluster_cluster_request_duration_seconds_count{instance=~\"$server\"}[5m])) by (instance)", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(rate(mattermost_cluster_cluster_request_duration_seconds_count{instance=~\"$server\"}[5m]))", + "interval": "", + "legendFormat": "Total", + "refId": "B" + } + ], + "title": "Cluster Requests Per Second", + "type": "timeseries" + }, + { + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 100 + }, + "id": 48, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "refId": "A" + } + ], + "title": "Job Server", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + 
}, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 101 + }, + "id": 46, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "mattermost_jobs_active{instance=~\"$server\"}", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(mattermost_jobs_active{instance=~\"$server\"})", + "interval": "", + "legendFormat": "Total", + "refId": "B" + } + ], + "title": "Active Jobs", + "type": "timeseries" + }, + { + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 109 + }, + "id": 4, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "hwSeAIRSk" + }, + "refId": "A" + } + ], + "title": "System Metrics", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", 
+ "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 110 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "irate(mattermost_process_cpu_seconds_total{instance=~\"$server\"}[5m])* 100", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + } + ], + "title": "CPU Utilization Percentage", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + 
"steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "decbytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 110 + }, + "id": 8, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "go_memstats_heap_inuse_bytes{instance=~\"$server\"}", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + } + ], + "title": "Heap Utilization", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 119 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.2.3", + "targets": [ + { + 
"datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "go_goroutines{instance=~\"$server\"}", + "interval": "", + "legendFormat": "{{instance}}", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "expr": "sum(go_goroutines{instance=~\"$server\"})", + "interval": "", + "legendFormat": "Total", + "refId": "B" + } + ], + "title": "Goroutines", + "type": "timeseries" + } + ], + "refresh": "1m", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(instance)", + "hide": 0, + "includeAll": false, + "label": "server", + "multi": true, + "name": "server", + "options": [], + "query": { + "query": "label_values(instance)", + "refId": "Prometheus-server-Variable-Query" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "query_result(topk(10, sum(increase(mattermost_db_store_time_count{instance=~\"$server\"}[${__range_s}s])) by (method)))", + "hide": 0, + "includeAll": true, + "multi": true, + "name": "top_db_count", + "options": [], + "query": { + "query": "query_result(topk(10, sum(increase(mattermost_db_store_time_count{instance=~\"$server\"}[${__range_s}s])) by (method)))", + "refId": "Prometheus-top_db_count-Variable-Query" + }, + "refresh": 2, + "regex": ".*method=\"(.*?)\".*", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "query_result(topk(10, sum(increase(mattermost_db_store_time_sum{instance=~\"$server\"}[${__range_s}s])) by (method) / 
sum(increase(mattermost_db_store_time_count{instance=~\"$server\"}[${__range_s}s])) by (method)))", + "hide": 0, + "includeAll": true, + "multi": true, + "name": "top_db_latency", + "options": [], + "query": { + "query": "query_result(topk(10, sum(increase(mattermost_db_store_time_sum{instance=~\"$server\"}[${__range_s}s])) by (method) / sum(increase(mattermost_db_store_time_count{instance=~\"$server\"}[${__range_s}s])) by (method)))", + "refId": "Prometheus-top_db_latency-Variable-Query" + }, + "refresh": 2, + "regex": ".*method=\"(.*?)\".*", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "query_result(topk(10, sum(increase(mattermost_api_time_count{instance=~\"$server\"}[${__range_s}s])) by (handler)))", + "hide": 0, + "includeAll": true, + "multi": true, + "name": "top_api_count", + "options": [], + "query": { + "query": "query_result(topk(10, sum(increase(mattermost_api_time_count{instance=~\"$server\"}[${__range_s}s])) by (handler)))", + "refId": "Prometheus-top_api_count-Variable-Query" + }, + "refresh": 2, + "regex": ".*handler=\"(.*?)\".*", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "query_result(topk(10, sum(increase(mattermost_api_time_sum{instance=~\"$server\"}[${__range_s}s])) by (handler) / sum(increase(mattermost_api_time_count{instance=~\"$server\"}[${__range_s}s])) by (handler)))", + "hide": 0, + "includeAll": true, + "multi": true, + "name": "top_api_latency", + "options": [], + "query": { + "query": "query_result(topk(10, sum(increase(mattermost_api_time_sum{instance=~\"$server\"}[${__range_s}s])) by (handler) / 
sum(increase(mattermost_api_time_count{instance=~\"$server\"}[${__range_s}s])) by (handler)))", + "refId": "Prometheus-top_api_latency-Variable-Query" + }, + "refresh": 2, + "regex": ".*handler=\"(.*?)\".*", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "Mattermost Performance Monitoring v2", + "uid": "im7xNX17kdd", + "version": 2, + "weekStart": "" +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring_rev2.json b/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring_rev2.json new file mode 100644 index 00000000..84544ad6 --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-performance-monitoring_rev2.json @@ -0,0 +1,1704 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + }, + { + "datasource": "Prometheus", + "enable": false, + "expr": "mattermost_system_server_start_time{instance=~\"$server\"}*1000", + "hide": false, + "iconColor": "#C8F2C2", + "limit": 100, + "name": "Server Start", + "showIn": 0, + "step": "5s", + "tagKeys": "instance", + "tags": [], + "titleFormat": "Server Started", + "type": "tags", + "useValueForTime": true + }, + { + "datasource": "Prometheus", + "enable": false, + "expr": "mattermost_jobs_active{instance=~\"$server\"}", + "hide": false, + "iconColor": "#FFF899", + "limit": 100, + "name": "Active Jobs", + "showIn": 0, + "step": "5s", + "tagKeys": "type", + "tags": [], + "titleFormat": "Active Job", + "type": "tags", + "useValueForTime": false + } + ] + }, + "editable": true, + "gnetId": 2542, + "graphTooltip": 0, + "iteration": 1610119520865, + "links": [], + "panels": [ + { + "aliasColors": {}, 
+ "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 0 + }, + "hiddenSeries": false, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_post_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_post_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_post_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_post_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Messages per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, 
+ "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 9 + }, + "hiddenSeries": false, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_http_errors_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_http_errors_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_http_errors_total{instance=~\"$server\"}[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_http_errors_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of API Errors per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "alert": { + 
"conditions": [ + { + "evaluator": { + "params": [ + 36000 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A", + "10s", + "now" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "60s", + "handler": 1, + "message": "#grafana-alert", + "name": "Number of Connected Devices (WebSocket Connections) alert", + "noDataState": "no_data", + "notifications": [] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 18 + }, + "hiddenSeries": false, + "id": 6, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_http_websockets_total{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_http_websockets_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_http_websockets_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 36000 + } + ], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number 
of Connected Devices (WebSocket Connections)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 27 + }, + "hiddenSeries": false, + "id": 1, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_db_master_connections_total{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_db_master_connections_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_db_master_connections_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_db_master_connections_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + 
"timeRegions": [], + "timeShift": null, + "title": "Number of Connections to Master Database", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 36 + }, + "hiddenSeries": false, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_db_read_replica_connections_total{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_db_read_replica_connections_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_db_read_replica_connections_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_db_read_replica_connections_total", + "refId": 
"B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Connections to Read Replica Databases", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 45 + }, + "hiddenSeries": false, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "mattermost_db_search_replica_connections_total{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_db_search_replica_connections_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(mattermost_db_search_replica_connections_total{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": 
"Total", + "metric": "mattermost_db_search_replica_connections_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Connections to Search Replica Databases", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 54 + }, + "hiddenSeries": false, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(irate(mattermost_post_webhooks_total[1m]))", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Webhooks", + "metric": "mattermost_post_webhooks_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_post_total[1m])*10)-sum(irate(mattermost_post_webhooks_total[1m]))", + "format": "time_series", + 
"interval": "5s", + "intervalFactor": 1, + "legendFormat": "Users", + "metric": "mattermost_post_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Messages Per Second by Type", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 63 + }, + "hiddenSeries": false, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_post_broadcasts_total{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_post_broadcasts_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(irate(mattermost_post_broadcasts_total{instance=~\"$server\"}[1m]))", + 
"format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "mattermost_post_broadcasts_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Broadcasts per Second", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 72 + }, + "hiddenSeries": false, + "id": 13, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "irate(mattermost_process_cpu_seconds_total{instance=~\"$server\"}[1m])* 100", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": 
"CPU Utilization Rate (%)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Utilization Rate (%)", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 81 + }, + "hiddenSeries": false, + "id": 11, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "1000.0 * rate(mattermost_http_request_duration_seconds_sum{instance=~\"$server\"}[1m]) / rate(mattermost_http_request_duration_seconds_count{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "mattermost_http_request_duration_seconds_sum", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Mean API Request Time (in milliseconds)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, 
+ "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Milliseconds (ms)", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 90 + }, + "hiddenSeries": false, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "1000.0 * rate(mattermost_cluster_cluster_request_duration_seconds_sum{instance=~\"$server\"}[1m]) / rate(mattermost_cluster_cluster_request_duration_seconds_count{instance=~\"$server\"}[1m])", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "refId": "A", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Mean Cluster Request Time (in milliseconds)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, 
+ "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Milliseconds (ms)", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 99 + }, + "hiddenSeries": false, + "id": 4, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "go_goroutines{instance=~\"$server\"}", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "go_goroutines", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(go_goroutines{instance=~\"$server\"})", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "go_goroutines", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Go Routines", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + 
"label": "Count", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "editable": true, + "error": false, + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 108 + }, + "hiddenSeries": false, + "id": 5, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "go_memstats_alloc_bytes{instance=~\"$server\"}/1024/1024", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "{{instance}}", + "metric": "go_memstats_alloc_bytes_total", + "refId": "A", + "step": 5 + }, + { + "expr": "sum(go_memstats_alloc_bytes{instance=~\"$server\"})/1024/1024", + "format": "time_series", + "interval": "5s", + "intervalFactor": 1, + "legendFormat": "Total", + "metric": "go_memstats_alloc_bytes_total", + "refId": "B", + "step": 5 + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Memory Usage (in MB)", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + 
"label": "MB", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "description": "A record of any job detected as being active within the configured Prometheus polling interval.", + "fieldConfig": { + "defaults": { + "custom": {} + }, + "overrides": [] + }, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 117 + }, + "hiddenSeries": false, + "id": 15, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "alertThreshold": true + }, + "percentage": false, + "pluginVersion": "7.3.6", + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": true, + "targets": [ + { + "expr": "mattermost_jobs_active{instance=~\"$server\"}", + "instant": false, + "interval": "", + "legendFormat": "", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Active Jobs", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "Active", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + 
"refresh": "5s", + "schemaVersion": 26, + "style": "dark", + "tags": [ + "mattermost", + "mattermost-perf" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "selected": false, + "text": "host.docker.internal:8067", + "value": "host.docker.internal:8067" + }, + "datasource": "Prometheus", + "definition": "", + "error": null, + "hide": 0, + "includeAll": false, + "label": "server", + "multi": true, + "name": "server", + "options": [], + "query": "label_values(instance)", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": null, + "tags": [], + "tagsQuery": null, + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Mattermost Performance Monitoring", + "uid": "000000011", + "version": 1 +} diff --git a/build/docker/grafana/dashboards/mattermost/mattermost-web-app-metrics.json b/build/docker/grafana/dashboards/mattermost/mattermost-web-app-metrics.json new file mode 100644 index 00000000..49ada4f6 --- /dev/null +++ b/build/docker/grafana/dashboards/mattermost/mattermost-web-app-metrics.json @@ -0,0 +1,3387 @@ +{ + "__elements": {}, + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "11.1.0" + }, + { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "stat", + "name": "Stat", + "version": "" + }, + { + "type": "panel", + "id": "timeseries", + "name": "Time series", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 
1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [], + "liveNow": false, + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 7, + "panels": [], + "title": "Interactivity", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The time between when a user clicks on a channel in the LHS and when posts in that channel are shown to them, measured as a percentile of all requests in a given 30 minute period", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 1 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.3.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "exemplar": false, + "expr": 
"histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_channel_switch_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "format": "time_series", + "hide": false, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A" + } + ], + "title": "Channel Switch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Channel Switch measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 250ms\n- Needs Improvement: 500ms", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 0.25 + }, + { + "color": "red", + "value": 0.5 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit", + "value": "none" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 10, + "y": 1 + }, + "id": 23, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_channel_switch_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", 
+ "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "exemplar": false, + "expr": "sum(increase(mattermost_webapp_channel_switch_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Channel Switch (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The time between when a user clicks on a team in the LHS and when posts in a channel on that team are shown to them", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 1 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + 
"placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_team_switch_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Team Switch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Team Switch measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 750ms\n- Needs Improvement: 1.25s", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 0.75 + }, + { + "color": "red", + "value": 1.25 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit", + "value": "none" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 22, + "y": 1 + }, + "id": 31, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.1.0", + "targets": [ + { + 
"datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_team_switch_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "exemplar": false, + "expr": "sum(increase(mattermost_webapp_team_switch_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Team Switch (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + 
"unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 9 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(increase(mattermost_webapp_channel_switch_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of Channel Switches", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 9 + }, + "id": 13, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": 
true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_team_switch_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "interval": "", + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of Team Switches", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The time between when a user clicks to open the RHS and when the VirtualizedThreadViewer component is mounted, measured as a percentile of all requests in a given 30 minute period", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 17 + }, + "id": 19, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + 
}, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.3.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_rhs_load_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "RHS Load", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all RHS Load measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 350ms\n- Needs Improvement: 600ms", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 0.35 + }, + { + "color": "red", + "value": 0.6 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 10, + "y": 17 + }, + "id": 27, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": 
{ + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_rhs_load_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "editorMode": "code", + "expr": "sum(increase(mattermost_webapp_rhs_load_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "hide": false, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B" + } + ], + "title": "RHS Load (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The time between when a user clicks on Threads in the LHS and when a list of threads is rendered, measured as a percentile of all requests in a given 30 minute period", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "s" + 
}, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 17 + }, + "id": 21, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.3.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_global_threads_load_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Threads List Load", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Threads List Load measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 500ms\n- Needs Improvement: 800ms", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 0.5 + }, + { + "color": "red", + "value": 0.8 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 22, + "y": 17 + }, + "id": 32, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + 
"reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_global_threads_load_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_global_threads_load_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Threads List Load (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + 
"thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 25 + }, + "id": 20, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_rhs_load_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of RHS Loads", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + 
"overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 25 + }, + "id": 22, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_global_threads_load_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of Threads List Loads", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Tracks the duration of the longest click or keyboard interaction during an application's lifecycle (Desktop/Chrome/Edge only)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 33 + }, + "id": 6, + "options": { + 
"legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_interaction_to_next_paint_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Interaction to Next Paint (INP)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Interaction to Next Paint measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 200ms\n- Needs Improvement: 500ms", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "#EAB839", + "value": 0.2 + }, + { + "color": "red", + "value": 0.5 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 10, + "y": 33 + }, + "id": 33, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + 
"pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_interaction_to_next_paint_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_interaction_to_next_paint_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "INP (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The number of times that the main UI thread of a browser was blocked for more than 50ms (Desktop/Chrome/Edge only)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + 
"thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 33 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(rate(mattermost_webapp_long_tasks{agent=~\"$agent\",platform=~\"$platform\"}[10m]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "# of Long Tasks per second", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Long Tasks", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 41 + }, + "id": 17, + 
"options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_interaction_to_next_paint_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "interval": "", + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of INPs", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 49 + }, + "id": 5, + "panels": [], + "title": "Initial Load", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "Tracks the time from when a browser first requests data from the server until when the browser starts to receive a response", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "s" + }, + 
"overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 50 + }, + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_time_to_first_byte_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Time to First Byte (TTFB)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Time To First Byte measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 800ms\n- Needs Improvement: 1.8s", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "#EAB839", + "value": 0.8 + }, + { + "color": "red", + "value": 1.8 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 10, + "y": 50 + }, + "id": 34, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + 
"showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_time_to_first_byte_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_time_to_first_byte_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "TTFB (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The time between when the browser starts to load the web app and when the `window.load` event completes which is before much of the web app is loaded.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + 
"group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 50 + }, + "id": 11, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_page_load_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Load Event End (Formerly Page Load)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Load Event End measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 2.25s\n- Needs Improvement: 4.5s", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "#EAB839", + "value": 2.25 + }, + { + "color": "red", + "value": 4.5 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 22, + 
"y": 50 + }, + "id": 35, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_page_load_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_page_load_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Load Event End (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": 
"linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 58 + }, + "id": 15, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_time_to_first_byte_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "interval": "", + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of TTFBs", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + 
"mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 58 + }, + "id": 14, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_page_load_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "interval": "", + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of Load Event Ends", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "FCP tracks the amount of time taken from when a page starts to load to when the first content on that page is visible. LCP tracks the amount of time taken until a large piece of content is visible. 
(Only on Desktop/Firefox/Chrome/Edge)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 20 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 66 + }, + "id": 3, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_first_contentful_paint_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "FCP [[percentile]]th Percentile", + "range": true, + "refId": "B", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) 
(increase(mattermost_webapp_largest_contentful_paint_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "LCP [[percentile]]th Percentile", + "range": true, + "refId": "C", + "useBackend": false + } + ], + "title": "First Contentful Paint (FCP)/Largest Contentful Paint (LCP)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all First Contentful Paint measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 1.8s\n- Needs Improvement: 3s", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "#EAB839", + "value": 1.8 + }, + { + "color": "red", + "value": 3 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 10, + "y": 66 + }, + "id": 36, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_first_contentful_paint_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", 
+ "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_first_contentful_paint_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "FCP (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "A measurement of how much page layout shifts unexpectedly (Desktop/Chrome/Edge only)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 10 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 66 + }, + "id": 8, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": 
"single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_cumulative_layout_shift_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$decay])))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Cumulative Layout Shift (CLS)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Cumulative Layout Shift measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 0.1\n- Needs Improvement: 0.25", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "#EAB839", + "value": 0.1 + }, + { + "color": "red", + "value": 0.25 + } + ] + }, + "unit": "none" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + "fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 2, + "x": 22, + "y": 66 + }, + "id": 37, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + 
"editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_cumulative_layout_shift_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_cumulative_layout_shift_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "CLS (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 74 + }, + "id": 16, + "options": { + "legend": { 
+ "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_first_contentful_paint_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "interval": "", + "legendFormat": "# of FCP Reports", + "range": true, + "refId": "B", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_largest_contentful_paint_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "interval": "", + "legendFormat": "# of LCP Reports", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "# of FCPs/LCPs", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "The [[percentile]]th percentile of all Largest Contentful Paint measurements over the selected time period and the number of measurements taken during that time.\n\nTarget values for P75:\n- Good: 2.5s\n- Needs Improvement: 4s", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "#EAB839", + "value": 2.5 + }, + { + "color": "red", + "value": 4 + } + ] + }, + "unit": "s" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "unit" + }, + { + "id": "color", + "value": { + 
"fixedColor": "text", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 10, + "y": 74 + }, + "id": 38, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "value_and_name", + "wideLayout": false + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "histogram_quantile($percentile / 100, sum by(le) (increase(mattermost_webapp_largest_contentful_paint_bucket{agent=~\"$agent\",platform=~\"$platform\"}[$__range])))", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "[[percentile]]th Percentile", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_largest_contentful_paint_count{agent=~\"$agent\",platform=~\"$platform\"}[$__range]))", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Count", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "LCP (Overall)", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + 
"insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 74 + }, + "id": 18, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum(increase(mattermost_webapp_cumulative_layout_shift_count{agent=~\"$agent\",platform=~\"$platform\"}[$decay]))", + "format": "time_series", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "interval": "", + "legendFormat": "# of Reports", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "# of CLSs", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 82 + }, + "id": 9, + "panels": [], + "title": "Other Metrics", + "type": "row" + } + ], + "refresh": "", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(mattermost_webapp_channel_switch_count,agent)", + "hide": 0, + "includeAll": true, + "label": "Browser/App", + "multi": true, + "name": "agent", + "options": [], + "query": { + "qryType": 1, + "query": "label_values(mattermost_webapp_channel_switch_count,agent)", + "refId": 
"PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "Prometheus" + }, + "definition": "label_values(mattermost_webapp_channel_switch_count,platform)", + "hide": 0, + "includeAll": true, + "label": "OS", + "multi": true, + "name": "platform", + "options": [], + "query": { + "qryType": 1, + "query": "label_values(mattermost_webapp_channel_switch_count,platform)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": false, + "text": "75", + "value": "75" + }, + "description": "", + "hide": 0, + "includeAll": false, + "label": "Percentile", + "multi": false, + "name": "percentile", + "options": [ + { + "selected": true, + "text": "75", + "value": "75" + }, + { + "selected": false, + "text": "99", + "value": "99" + } + ], + "query": "75,99", + "queryValue": "", + "skipUrlSync": false, + "type": "custom" + }, + { + "current": { + "selected": false, + "text": "30m", + "value": "30m" + }, + "hide": 0, + "includeAll": false, + "label": "Decay Time", + "multi": false, + "name": "decay", + "options": [ + { + "selected": true, + "text": "30m", + "value": "30m" + }, + { + "selected": false, + "text": "1h", + "value": "1h" + }, + { + "selected": false, + "text": "3h", + "value": "3h" + }, + { + "selected": false, + "text": "6h", + "value": "6h" + }, + { + "selected": false, + "text": "12h", + "value": "12h" + }, + { + "selected": false, + "text": "1d", + "value": "1d" + } + ], + "query": "30m,1h,3h,6h,12h,1d", + "queryValue": "", + "skipUrlSync": false, + "type": "custom" + } + ] + }, + "time": { + "from": "now-24h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "Web App Metrics", + "uid": "adok91bti9pmof", + "version": 29, + "weekStart": "" +} diff --git 
a/cmd/mattermost/commands/cmdtestlib.go b/cmd/mattermost/commands/cmdtestlib.go new file mode 100644 index 00000000..e4d8ef1a --- /dev/null +++ b/cmd/mattermost/commands/cmdtestlib.go @@ -0,0 +1,226 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "bytes" + "encoding/json" + "flag" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "slices" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/v8/channels/api4" + "github.com/mattermost/mattermost/server/v8/channels/store/storetest/mocks" + "github.com/mattermost/mattermost/server/v8/channels/testlib" +) + +var coverprofileCounters = make(map[string]int) + +var mainHelper *testlib.MainHelper + +type testHelper struct { + *api4.TestHelper + + config *model.Config + tempDir string + configFilePath string + disableAutoConfig bool +} + +// Setup creates an instance of testHelper. +func Setup(tb testing.TB) *testHelper { + dir, err := testlib.SetupTestResources() + if err != nil { + panic("failed to create temporary directory: " + err.Error()) + } + + api4TestHelper := api4.Setup(tb) + + testHelper := &testHelper{ + TestHelper: api4TestHelper, + tempDir: dir, + configFilePath: filepath.Join(dir, "config-helper.json"), + } + + config := &model.Config{} + config.SetDefaults() + testHelper.SetConfig(config) + + return testHelper +} + +// SetupWithStoreMock creates an instance of testHelper backed by a mocked store. 
+func SetupWithStoreMock(tb testing.TB) *testHelper { + dir, err := testlib.SetupTestResources() + if err != nil { + panic("failed to create temporary directory: " + err.Error()) + } + + api4TestHelper := api4.SetupWithStoreMock(tb) + systemStore := mocks.SystemStore{} + systemStore.On("Get").Return(make(model.StringMap), nil) + licenseStore := mocks.LicenseStore{} + licenseStore.On("Get", "").Return(&model.LicenseRecord{}, nil) + api4TestHelper.App.Srv().Store().(*mocks.Store).On("System").Return(&systemStore) + api4TestHelper.App.Srv().Store().(*mocks.Store).On("License").Return(&licenseStore) + + testHelper := &testHelper{ + TestHelper: api4TestHelper, + tempDir: dir, + configFilePath: filepath.Join(dir, "config-helper.json"), + } + + config := &model.Config{} + config.SetDefaults() + testHelper.SetConfig(config) + + return testHelper +} + +// InitBasic simply proxies to api4.InitBasic, while still returning a testHelper. +func (h *testHelper) InitBasic() *testHelper { + h.TestHelper.InitBasic() + return h +} + +// TemporaryDirectory returns the temporary directory created for user by the test helper. +func (h *testHelper) TemporaryDirectory() string { + return h.tempDir +} + +// Config returns the configuration passed to a running command. +func (h *testHelper) Config() *model.Config { + return h.config.Clone() +} + +// ConfigPath returns the path to the temporary config file passed to a running command. +func (h *testHelper) ConfigPath() string { + return h.configFilePath +} + +// SetConfig replaces the configuration passed to a running command. 
+func (h *testHelper) SetConfig(config *model.Config) { + if !testing.Short() { + config.SqlSettings = *mainHelper.GetSQLSettings() + } + + // Disable strict password requirements for test + *config.PasswordSettings.MinimumLength = 5 + *config.PasswordSettings.Lowercase = false + *config.PasswordSettings.Uppercase = false + *config.PasswordSettings.Symbol = false + *config.PasswordSettings.Number = false + + h.config = config + + buf, err := json.Marshal(config) + if err != nil { + panic("failed to marshal config: " + err.Error()) + } + if err := os.WriteFile(h.configFilePath, buf, 0600); err != nil { + panic("failed to write file " + h.configFilePath + ": " + err.Error()) + } +} + +// SetAutoConfig configures whether the --config flag is automatically passed to a running command. +func (h *testHelper) SetAutoConfig(autoConfig bool) { + h.disableAutoConfig = !autoConfig +} + +// TearDown cleans up temporary files and assets created during the life of the test helper. +func (h *testHelper) TearDown() { + h.TestHelper.TearDown() + os.RemoveAll(h.tempDir) +} + +func (h *testHelper) execArgs(t *testing.T, args []string) []string { + ret := []string{"-test.v", "-test.run", "ExecCommand"} + if coverprofile := flag.Lookup("test.coverprofile").Value.String(); coverprofile != "" { + dir := filepath.Dir(coverprofile) + base := filepath.Base(coverprofile) + baseParts := strings.SplitN(base, ".", 2) + name := strings.Replace(t.Name(), "/", "_", -1) + coverprofileCounters[name] = coverprofileCounters[name] + 1 + baseParts[0] = fmt.Sprintf("%v-%v-%v", baseParts[0], name, coverprofileCounters[name]) + ret = append(ret, "-test.coverprofile", filepath.Join(dir, strings.Join(baseParts, "."))) + } + + ret = append(ret, "--") + + // Unless the test passes a `--config` of its own, create a temporary one from the default + // configuration with the current test database applied. 
+ hasConfig := h.disableAutoConfig + if slices.Contains(args, "--config") { + hasConfig = true + } + + if !hasConfig { + ret = append(ret, "--config", h.configFilePath) + } + + ret = append(ret, args...) + + return ret +} + +func (h *testHelper) cmd(t *testing.T, args []string) *exec.Cmd { + path, err := os.Executable() + require.NoError(t, err) + cmd := exec.Command(path, h.execArgs(t, args)...) + + cmd.Env = []string{} + for _, env := range os.Environ() { + // Ignore MM_SQLSETTINGS_DATASOURCE from the environment, since we override. + if strings.HasPrefix(env, "MM_SQLSETTINGS_DATASOURCE=") { + continue + } + + cmd.Env = append(cmd.Env, env) + } + + return cmd +} + +// CheckCommand invokes the test binary, returning the output modified for assertion testing. +func (h *testHelper) CheckCommand(t *testing.T, args ...string) string { + output, err := h.cmd(t, args).CombinedOutput() + require.NoError(t, err, string(output)) + return strings.TrimSpace(strings.TrimSuffix(strings.TrimSpace(string(output)), "PASS")) +} + +// RunCommand invokes the test binary, returning only any error. +func (h *testHelper) RunCommand(t *testing.T, args ...string) error { + return h.cmd(t, args).Run() +} + +// RunCommandWithOutput is a variant of RunCommand that returns the unmodified output and any error. +func (h *testHelper) RunCommandWithOutput(t *testing.T, args ...string) (string, error) { + cmd := h.cmd(t, args) + + var buf bytes.Buffer + reader, writer := io.Pipe() + cmd.Stdout = writer + cmd.Stderr = writer + + done := make(chan bool) + go func() { + io.Copy(&buf, reader) + close(done) + }() + + err := cmd.Run() + writer.Close() + <-done + + return buf.String(), err +} diff --git a/cmd/mattermost/commands/db.go b/cmd/mattermost/commands/db.go new file mode 100644 index 00000000..17c96c0c --- /dev/null +++ b/cmd/mattermost/commands/db.go @@ -0,0 +1,347 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package commands + +import ( + "bytes" + "encoding/json" + "fmt" + "strconv" + "strings" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/mattermost/mattermost/server/v8/channels/store/sqlstore" + "github.com/mattermost/mattermost/server/v8/config" + "github.com/mattermost/mattermost/server/v8/platform/shared/filestore" + "github.com/mattermost/morph" + "github.com/mattermost/morph/models" +) + +var DbCmd = &cobra.Command{ + Use: "db", + Short: "Commands related to the database", +} + +var InitDbCmd = &cobra.Command{ + Use: "init", + Short: "Initialize the database", + Long: `Initialize the database for a given DSN, executing the migrations and loading the custom defaults if any. + +This command should be run using a database configuration DSN.`, + Example: ` # you can use the config flag to pass the DSN + $ mattermost db init --config postgres://localhost/mattermost + + # or you can use the MM_CONFIG environment variable + $ MM_CONFIG=postgres://localhost/mattermost mattermost db init + + # and you can set a custom defaults file to be loaded into the database + $ MM_CUSTOM_DEFAULTS_PATH=custom.json MM_CONFIG=postgres://localhost/mattermost mattermost db init`, + Args: cobra.NoArgs, + RunE: initDbCmdF, +} + +var ResetCmd = &cobra.Command{ + Use: "reset", + Short: "Reset the database to initial state", + Long: "Completely erases the database causing the loss of all data. 
This will reset Mattermost to its initial state.", + RunE: resetCmdF, +} + +var MigrateCmd = &cobra.Command{ + Use: "migrate", + Short: "Migrate the database if there are any unapplied migrations", + Long: "Run the missing migrations from the migrations table.", + RunE: migrateCmdF, +} + +var DowngradeCmd = &cobra.Command{ + Use: "downgrade", + Short: "Downgrade the database with the given plan or migration numbers", + Long: "Downgrade the database with the given plan or migration numbers. " + + "The plan will be read from filestore hence the path should be relative to file store root.", + RunE: downgradeCmdF, + Args: cobra.ExactArgs(1), +} + +var DBVersionCmd = &cobra.Command{ + Use: "version", + Short: "Returns the recent applied version number", + RunE: dbVersionCmdF, +} + +func init() { + ResetCmd.Flags().Bool("confirm", false, "Confirm you really want to delete everything and a DB backup has been performed.") + DBVersionCmd.Flags().Bool("all", false, "Returns all applied migrations") + MigrateCmd.Flags().Bool("auto-recover", false, "Recover the database to it's existing state after a failed migration.") + MigrateCmd.Flags().Bool("save-plan", false, "Saves the migration plan into file store so that it can be used in the future.") + MigrateCmd.Flags().Bool("dry-run", false, "Runs the migration plan without applying it.") + + DowngradeCmd.Flags().Bool("auto-recover", false, "Recover the database to it's existing state after a failed migration.") + DowngradeCmd.Flags().Bool("dry-run", false, "Runs the migration plan without applying it.") + + DbCmd.AddCommand( + InitDbCmd, + ResetCmd, + MigrateCmd, + DowngradeCmd, + DBVersionCmd, + ) + + RootCmd.AddCommand( + DbCmd, + ) +} + +func initDbCmdF(command *cobra.Command, _ []string) error { + logger := mlog.CreateConsoleLogger() + + dsn := getConfigDSN(command, config.GetEnvironment()) + if !config.IsDatabaseDSN(dsn) { + return errors.New("this command should be run using a database configuration DSN") + } + + 
customDefaults, err := loadCustomDefaults() + if err != nil { + return errors.Wrap(err, "error loading custom configuration defaults") + } + + configStore, err := config.NewStoreFromDSN(getConfigDSN(command, config.GetEnvironment()), false, customDefaults, true) + if err != nil { + return errors.Wrap(err, "failed to load configuration") + } + defer configStore.Close() + + sqlStore, err := sqlstore.New(configStore.Get().SqlSettings, logger, nil) + if err != nil { + return errors.Wrap(err, "failed to initialize store") + } + defer sqlStore.Close() + + CommandPrettyPrintln("Database store correctly initialised") + + return nil +} + +func resetCmdF(command *cobra.Command, args []string) error { + logger := mlog.CreateConsoleLogger() + + ss, err := initStoreCommandContextCobra(logger, command) + if err != nil { + return errors.Wrap(err, "could not initialize store") + } + defer ss.Close() + + confirmFlag, _ := command.Flags().GetBool("confirm") + if !confirmFlag { + var confirm string + CommandPrettyPrintln("Have you performed a database backup? (YES/NO): ") + fmt.Scanln(&confirm) + + if confirm != "YES" { + return errors.New("ABORTED: You did not answer YES exactly, in all capitals.") + } + CommandPrettyPrintln("Are you sure you want to delete everything? All data will be permanently deleted? 
(YES/NO): ") + fmt.Scanln(&confirm) + if confirm != "YES" { + return errors.New("ABORTED: You did not answer YES exactly, in all capitals.") + } + } + + ss.DropAllTables() + + CommandPrettyPrintln("Database successfully reset") + + return nil +} + +func migrateCmdF(command *cobra.Command, args []string) error { + logger := mlog.CreateConsoleLogger() + defer logger.Shutdown() + + cfgDSN := getConfigDSN(command, config.GetEnvironment()) + recoverFlag, _ := command.Flags().GetBool("auto-recover") + savePlan, _ := command.Flags().GetBool("save-plan") + dryRun, _ := command.Flags().GetBool("dry-run") + cfgStore, err := config.NewStoreFromDSN(cfgDSN, true, nil, true) + if err != nil { + return errors.Wrap(err, "failed to load configuration") + } + config := cfgStore.Get() + + migrator, err := sqlstore.NewMigrator(config.SqlSettings, logger, dryRun) + if err != nil { + return errors.Wrap(err, "failed to create migrator") + } + defer migrator.Close() + + plan, err := migrator.GeneratePlan(recoverFlag) + if err != nil { + return errors.Wrap(err, "failed to generate migration plan") + } + + if len(plan.Migrations) == 0 { + CommandPrettyPrintln("No migrations to apply.") + return nil + } + + if savePlan || recoverFlag { + backend, err2 := filestore.NewFileBackend(ConfigToFileBackendSettings(&config.FileSettings, false, true)) + if err2 != nil { + return fmt.Errorf("failed to initialize filebackend: %w", err2) + } + + b, mErr := json.MarshalIndent(plan, "", " ") + if mErr != nil { + return fmt.Errorf("failed to marshal plan: %w", mErr) + } + + fileName, err2 := migrator.GetFileName(plan) + if err2 != nil { + return fmt.Errorf("failed to generate plan file: %w", err2) + } + + _, err = backend.WriteFile(bytes.NewReader(b), fileName+".json") + if err != nil { + return fmt.Errorf("failed to write migration plan: %w", err) + } + + CommandPrettyPrintln( + fmt.Sprintf("%s\nThe migration plan has been saved. 
File: %q.\nNote that "+ + " migration plan is saved into file store, so the filepath will be relative to root of file store\n%s", + strings.Repeat("*", 80), fileName+".json", strings.Repeat("*", 80))) + } + + err = migrator.MigrateWithPlan(plan, dryRun) + if err != nil { + return errors.Wrap(err, "failed to migrate with the plan") + } + + CommandPrettyPrintln("Database successfully migrated") + + return nil +} + +func downgradeCmdF(command *cobra.Command, args []string) error { + logger := mlog.CreateConsoleLogger() + defer logger.Shutdown() + + cfgDSN := getConfigDSN(command, config.GetEnvironment()) + cfgStore, err := config.NewStoreFromDSN(cfgDSN, true, nil, true) + if err != nil { + return errors.Wrap(err, "failed to load configuration") + } + config := cfgStore.Get() + + dryRun, _ := command.Flags().GetBool("dry-run") + recoverFlag, _ := command.Flags().GetBool("auto-recover") + + backend, err2 := filestore.NewFileBackend(ConfigToFileBackendSettings(&config.FileSettings, false, true)) + if err2 != nil { + return fmt.Errorf("failed to initialize filebackend: %w", err2) + } + + migrator, err := sqlstore.NewMigrator(config.SqlSettings, logger, dryRun) + if err != nil { + return errors.Wrap(err, "failed to create migrator") + } + defer migrator.Close() + + // check if the input is version numbers or a file + // if the input is given as a file, we assume it's a migration plan + versions := strings.Split(args[0], ",") + if _, sErr := strconv.Atoi(versions[0]); sErr == nil { + CommandPrettyPrintln("Database will be downgraded with the following versions: ", versions) + + err = migrator.DowngradeMigrations(dryRun, versions...) 
+ if err != nil { + return errors.Wrap(err, "failed to downgrade migrations") + } + + CommandPrettyPrintln("Database successfully downgraded") + return nil + } + + b, err := backend.ReadFile(args[0]) + if err != nil { + return fmt.Errorf("failed to read plan: %w", err) + } + + var plan models.Plan + err = json.Unmarshal(b, &plan) + if err != nil { + return fmt.Errorf("failed to unmarshal plan: %w", err) + } + + morph.SwapPlanDirection(&plan) + plan.Auto = recoverFlag + + err = migrator.MigrateWithPlan(&plan, dryRun) + if err != nil { + return errors.Wrap(err, "failed to migrate with the plan") + } + + CommandPrettyPrintln("Database successfully downgraded") + + return nil +} + +func dbVersionCmdF(command *cobra.Command, args []string) error { + logger := mlog.CreateConsoleLogger() + defer logger.Shutdown() + + ss, err := initStoreCommandContextCobra(logger, command) + if err != nil { + return errors.Wrap(err, "could not initialize store") + } + defer ss.Close() + + allFlag, _ := command.Flags().GetBool("all") + if allFlag { + applied, err2 := ss.GetAppliedMigrations() + if err2 != nil { + return errors.Wrap(err2, "failed to get applied migrations") + } + for _, migration := range applied { + CommandPrettyPrintln(fmt.Sprintf("Varsion: %d, Name: %s", migration.Version, migration.Name)) + } + return nil + } + + v, err := ss.GetDBSchemaVersion() + if err != nil { + return errors.Wrap(err, "failed to get schema version") + } + + CommandPrettyPrintln("Current database schema version is: " + strconv.Itoa(v)) + + return nil +} + +func ConfigToFileBackendSettings(s *model.FileSettings, enableComplianceFeature bool, skipVerify bool) filestore.FileBackendSettings { + if *s.DriverName == model.ImageDriverLocal { + return filestore.FileBackendSettings{ + DriverName: *s.DriverName, + Directory: *s.Directory, + } + } + return filestore.FileBackendSettings{ + DriverName: *s.DriverName, + AmazonS3AccessKeyId: *s.AmazonS3AccessKeyId, + AmazonS3SecretAccessKey: 
*s.AmazonS3SecretAccessKey, + AmazonS3Bucket: *s.AmazonS3Bucket, + AmazonS3PathPrefix: *s.AmazonS3PathPrefix, + AmazonS3Region: *s.AmazonS3Region, + AmazonS3Endpoint: *s.AmazonS3Endpoint, + AmazonS3SSL: s.AmazonS3SSL == nil || *s.AmazonS3SSL, + AmazonS3SignV2: s.AmazonS3SignV2 != nil && *s.AmazonS3SignV2, + AmazonS3SSE: s.AmazonS3SSE != nil && *s.AmazonS3SSE && enableComplianceFeature, + AmazonS3Trace: s.AmazonS3Trace != nil && *s.AmazonS3Trace, + AmazonS3RequestTimeoutMilliseconds: *s.AmazonS3RequestTimeoutMilliseconds, + SkipVerify: skipVerify, + } +} diff --git a/cmd/mattermost/commands/exec_command_test.go b/cmd/mattermost/commands/exec_command_test.go new file mode 100644 index 00000000..a5eed09a --- /dev/null +++ b/cmd/mattermost/commands/exec_command_test.go @@ -0,0 +1,19 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "flag" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestExecCommand(t *testing.T) { + if filter := flag.Lookup("test.run").Value.String(); filter != "ExecCommand" { + t.Skip("use -run ExecCommand to execute a command via the test executable") + } + RootCmd.SetArgs(flag.Args()) + require.NoError(t, RootCmd.Execute()) +} diff --git a/cmd/mattermost/commands/export.go b/cmd/mattermost/commands/export.go new file mode 100644 index 00000000..3dca195c --- /dev/null +++ b/cmd/mattermost/commands/export.go @@ -0,0 +1,187 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package commands + +import ( + "context" + "os" + "path/filepath" + "time" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/request" + "github.com/mattermost/mattermost/server/v8/channels/app" + + "github.com/pkg/errors" + "github.com/spf13/cobra" +) + +var ExportCmd = &cobra.Command{ + Use: "export", + Short: "Export data from Mattermost", + Long: "Export data from Mattermost in a format suitable for import into a third-party application or another Mattermost instance", +} + +var ScheduleExportCmd = &cobra.Command{ + Use: "schedule", + Short: "Schedule an export data job in Mattermost", + Long: "Schedule an export data job in Mattermost (this will run asynchronously via a background worker)", + Example: "export schedule --format=actiance --exportFrom=12345 --timeoutSeconds=12345", + RunE: scheduleExportCmdF, +} + +var BulkExportCmd = &cobra.Command{ + Use: "bulk [file]", + Short: "Export bulk data.", + Long: "Export data to a file compatible with the Mattermost Bulk Import format.", + Example: "export bulk bulk_data.json", + RunE: bulkExportCmdF, + Args: cobra.ExactArgs(1), +} + +func init() { + ScheduleExportCmd.Flags().String("format", "actiance", "The format to export data") + ScheduleExportCmd.Flags().Int64("exportFrom", -1, "The timestamp of the earliest post to export, expressed in seconds since the unix epoch.") + ScheduleExportCmd.Flags().Int("timeoutSeconds", -1, "The maximum number of seconds to wait for the job to complete before timing out.") + + BulkExportCmd.Flags().Bool("all-teams", true, "Export all teams from the server.") + BulkExportCmd.Flags().Bool("with-archived-channels", false, "Also exports archived channels.") + BulkExportCmd.Flags().Bool("with-profile-pictures", false, "Also exports profile pictures.") + BulkExportCmd.Flags().Bool("attachments", false, "Also export file attachments.") + BulkExportCmd.Flags().Bool("archive", false, "Outputs a single archive file.") + + 
ExportCmd.AddCommand(ScheduleExportCmd) + ExportCmd.AddCommand(BulkExportCmd) + + RootCmd.AddCommand(ExportCmd) +} + +func scheduleExportCmdF(command *cobra.Command, args []string) error { + a, err := InitDBCommandContextCobra(command, app.SkipPostInitialization()) + if err != nil { + return err + } + defer a.Srv().Shutdown() + + if !*a.Config().MessageExportSettings.EnableExport { + return errors.New("ERROR: The message export feature is not enabled") + } + + var rctx request.CTX = request.EmptyContext(a.Log()) + + // for now, format is hard-coded to actiance. In time, we'll have to support other formats and inject them into job data + format, err := command.Flags().GetString("format") + if err != nil { + return errors.New("format flag error") + } + if format != "actiance" { + return errors.New("unsupported export format") + } + + startTime, err := command.Flags().GetInt64("exportFrom") + if err != nil { + return errors.New("exportFrom flag error") + } + if startTime < 0 { + return errors.New("exportFrom must be a positive integer") + } + + timeoutSeconds, err := command.Flags().GetInt("timeoutSeconds") + if err != nil { + return errors.New("timeoutSeconds error") + } + if timeoutSeconds < 0 { + return errors.New("timeoutSeconds must be a positive integer") + } + + if messageExportI := a.MessageExport(); messageExportI != nil { + ctx := context.Background() + if timeoutSeconds > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, time.Second*time.Duration(timeoutSeconds)) + defer cancel() + } + + rctx = rctx.WithContext(ctx) + + job, err := messageExportI.StartSynchronizeJob(rctx, startTime) + if err != nil || job.Status == model.JobStatusError || job.Status == model.JobStatusCanceled { + CommandPrintErrorln("ERROR: Message export job failed. 
Please check the server logs") + } else { + CommandPrettyPrintln("SUCCESS: Message export job complete") + + auditRec := a.MakeAuditRecord(rctx, model.AuditEventScheduleExport, model.AuditStatusSuccess) + auditRec.AddMeta("format", format) + auditRec.AddMeta("start", startTime) + a.LogAuditRec(rctx, auditRec, nil) + } + } + return nil +} + +func bulkExportCmdF(command *cobra.Command, args []string) error { + a, err := InitDBCommandContextCobra(command, app.SkipPostInitialization()) + if err != nil { + return err + } + defer a.Srv().Shutdown() + + rctx := request.EmptyContext(a.Log()) + + allTeams, err := command.Flags().GetBool("all-teams") + if err != nil { + return errors.Wrap(err, "all-teams flag error") + } + if !allTeams { + return errors.New("Nothing to export. Please specify the --all-teams flag to export all teams.") + } + + attachments, err := command.Flags().GetBool("attachments") + if err != nil { + return errors.Wrap(err, "attachments flag error") + } + + archive, err := command.Flags().GetBool("archive") + if err != nil { + return errors.Wrap(err, "archive flag error") + } + + withArchivedChannels, err := command.Flags().GetBool("with-archived-channels") + if err != nil { + return errors.Wrap(err, "with-archived-channels flag error") + } + + includeProfilePictures, err := command.Flags().GetBool("with-profile-pictures") + if err != nil { + return errors.Wrap(err, "with-profile-pictures flag error") + } + + fileWriter, err := os.Create(args[0]) + if err != nil { + return err + } + defer fileWriter.Close() + + outPath, err := filepath.Abs(args[0]) + if err != nil { + return err + } + + var opts model.BulkExportOpts + opts.IncludeAttachments = attachments + opts.CreateArchive = archive + opts.IncludeArchivedChannels = withArchivedChannels + opts.IncludeProfilePictures = includeProfilePictures + if err := a.BulkExport(rctx, fileWriter, filepath.Dir(outPath), nil /* nil job since it's spawned from CLI */, opts); err != nil { + 
CommandPrintErrorln(err.Error()) + return err + } + + auditRec := a.MakeAuditRecord(rctx, model.AuditEventBulkExport, model.AuditStatusSuccess) + auditRec.AddMeta("all_teams", allTeams) + auditRec.AddMeta("file", args[0]) + a.LogAuditRec(rctx, auditRec, nil) + + return nil +} diff --git a/cmd/mattermost/commands/import.go b/cmd/mattermost/commands/import.go new file mode 100644 index 00000000..85448c62 --- /dev/null +++ b/cmd/mattermost/commands/import.go @@ -0,0 +1,189 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "errors" + "fmt" + "os" + + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/request" + "github.com/mattermost/mattermost/server/v8/channels/app" +) + +var ImportCmd = &cobra.Command{ + Use: "import", + Short: "Import data.", +} + +var SlackImportCmd = &cobra.Command{ + Use: "slack [team] [file]", + Short: "Import a team from Slack.", + Long: "Import a team from a Slack export zip file.", + Example: " import slack myteam slack_export.zip", + RunE: slackImportCmdF, +} + +var BulkImportCmd = &cobra.Command{ + Use: "bulk [file]", + Short: "Import bulk data.", + Long: "Import data from a Mattermost Bulk Import File.", + Example: " import bulk bulk_data.json", + RunE: bulkImportCmdF, +} + +func init() { + BulkImportCmd.Flags().Bool("apply", false, "Save the import data to the database. 
Use with caution - this cannot be reverted.") + BulkImportCmd.Flags().Bool("validate", false, "Validate the import data without making any changes to the system.") + BulkImportCmd.Flags().Int("workers", 2, "How many workers to run whilst doing the import.") + BulkImportCmd.Flags().String("import-path", "", "A path to the data directory to import files from.") + + ImportCmd.AddCommand( + BulkImportCmd, + SlackImportCmd, + ) + RootCmd.AddCommand(ImportCmd) +} + +func slackImportCmdF(command *cobra.Command, args []string) error { + a, err := InitDBCommandContextCobra(command) + if err != nil { + return err + } + defer a.Srv().Shutdown() + + rctx := request.EmptyContext(a.Log()) + + if len(args) != 2 { + return errors.New("Incorrect number of arguments.") + } + + team := getTeamFromTeamArg(a, args[0]) + if team == nil { + return errors.New("Unable to find team '" + args[0] + "'") + } + + fileReader, err := os.Open(args[1]) + if err != nil { + return err + } + defer fileReader.Close() + + fileInfo, err := fileReader.Stat() + if err != nil { + return err + } + + CommandPrettyPrintln("Running Slack Import. 
This may take a long time for large teams or teams with many messages.") + + importErr, log := a.SlackImport(rctx, fileReader, fileInfo.Size(), team.Id) + + if importErr != nil { + return err + } + + CommandPrettyPrintln("") + CommandPrintln(log.String()) + CommandPrettyPrintln("") + + CommandPrettyPrintln("Finished Slack Import.") + CommandPrettyPrintln("") + + auditRec := a.MakeAuditRecord(rctx, model.AuditEventSlackImport, model.AuditStatusSuccess) + auditRec.AddMeta("team", team) + auditRec.AddMeta("file", args[1]) + a.LogAuditRec(rctx, auditRec, nil) + + return nil +} + +func bulkImportCmdF(command *cobra.Command, args []string) error { + a, err := InitDBCommandContextCobra(command) + if err != nil { + return err + } + defer a.Srv().Shutdown() + + rctx := request.EmptyContext(a.Log()) + + apply, err := command.Flags().GetBool("apply") + if err != nil { + return errors.New("Apply flag error") + } + + validate, err := command.Flags().GetBool("validate") + if err != nil { + return errors.New("Validate flag error") + } + + workers, err := command.Flags().GetInt("workers") + if err != nil { + return errors.New("Workers flag error") + } + + importPath, err := command.Flags().GetString("import-path") + if err != nil { + return errors.New("import-path flag error") + } + + if len(args) != 1 { + return errors.New("Incorrect number of arguments.") + } + + fileReader, err := os.Open(args[0]) + if err != nil { + return err + } + defer fileReader.Close() + + if apply && validate { + CommandPrettyPrintln("Use only one of --apply or --validate.") + return nil + } + + if apply && !validate { + CommandPrettyPrintln("Running Bulk Import. 
This may take a long time.") + } else { + CommandPrettyPrintln("Running Bulk Import Data Validation.") + CommandPrettyPrintln("** This checks the validity of the entities in the data file, but does not persist any changes **") + CommandPrettyPrintln("Use the --apply flag to perform the actual data import.") + } + + CommandPrettyPrintln("") + + if lineNumber, err := a.BulkImportWithPath(rctx, fileReader, nil, true, !apply, workers, importPath); err != nil { + CommandPrintErrorln(err.Error()) + if lineNumber != 0 { + CommandPrintErrorln(fmt.Sprintf("Error occurred on data file line %v", lineNumber)) + } + return err + } + + if apply { + CommandPrettyPrintln("Finished Bulk Import.") + auditRec := a.MakeAuditRecord(rctx, model.AuditEventBulkImport, model.AuditStatusSuccess) + auditRec.AddMeta("file", args[0]) + a.LogAuditRec(rctx, auditRec, nil) + } else { + CommandPrettyPrintln("Validation complete. You can now perform the import by rerunning this command with the --apply flag.") + } + + return nil +} + +func getTeamFromTeamArg(a *app.App, teamArg string) *model.Team { + var team *model.Team + team, err := a.Srv().Store().Team().GetByName(teamArg) + + if err != nil { + var t *model.Team + if t, err = a.Srv().Store().Team().Get(teamArg); err == nil { + team = t + } + } + return team +} diff --git a/cmd/mattermost/commands/init.go b/cmd/mattermost/commands/init.go new file mode 100644 index 00000000..70e9e095 --- /dev/null +++ b/cmd/mattermost/commands/init.go @@ -0,0 +1,69 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package commands + +import ( + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/i18n" + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/mattermost/mattermost/server/public/shared/request" + "github.com/mattermost/mattermost/server/v8/channels/app" + "github.com/mattermost/mattermost/server/v8/channels/store" + "github.com/mattermost/mattermost/server/v8/channels/store/sqlstore" + "github.com/mattermost/mattermost/server/v8/channels/utils" + "github.com/mattermost/mattermost/server/v8/config" +) + +func initDBCommandContextCobra(command *cobra.Command, readOnlyConfigStore bool, options ...app.Option) (*app.App, error) { + a, err := initDBCommandContext(getConfigDSN(command, config.GetEnvironment()), readOnlyConfigStore, options...) + if err != nil { + // Returning an error just prints the usage message, so actually panic + panic(err) + } + + a.InitPlugins(request.EmptyContext(a.Log()), *a.Config().PluginSettings.Directory, *a.Config().PluginSettings.ClientDirectory) + a.DoAppMigrations() + + return a, nil +} + +func InitDBCommandContextCobra(command *cobra.Command, options ...app.Option) (*app.App, error) { + return initDBCommandContextCobra(command, true, options...) +} + +func initDBCommandContext(configDSN string, readOnlyConfigStore bool, options ...app.Option) (*app.App, error) { + if err := utils.TranslationsPreInit(); err != nil { + return nil, err + } + model.AppErrorInit(i18n.T) + + // The option order is important as app.Config option reads app.StartMetrics option. + options = append(options, app.Config(configDSN, readOnlyConfigStore, nil)) + s, err := app.NewServer(options...) 
+ if err != nil { + return nil, err + } + + a := app.New(app.ServerConnector(s.Channels())) + + if model.BuildEnterpriseReady == "true" { + a.Srv().LoadLicense() + } + + return a, nil +} + +func initStoreCommandContextCobra(logger mlog.LoggerIFace, command *cobra.Command) (store.Store, error) { + cfgDSN := getConfigDSN(command, config.GetEnvironment()) + cfgStore, err := config.NewStoreFromDSN(cfgDSN, true, nil, true) + if err != nil { + return nil, errors.Wrap(err, "failed to load configuration") + } + + config := cfgStore.Get() + return sqlstore.New(config.SqlSettings, logger, nil) +} diff --git a/cmd/mattermost/commands/jobserver.go b/cmd/mattermost/commands/jobserver.go new file mode 100644 index 00000000..1cc71e82 --- /dev/null +++ b/cmd/mattermost/commands/jobserver.go @@ -0,0 +1,74 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "os" + "os/signal" + "syscall" + + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/request" + "github.com/mattermost/mattermost/server/v8/channels/app" + "github.com/mattermost/mattermost/server/v8/config" +) + +var JobserverCmd = &cobra.Command{ + Use: "jobserver", + Short: "Start the Mattermost job server", + RunE: jobserverCmdF, +} + +func init() { + JobserverCmd.Flags().Bool("nojobs", false, "Do not run jobs on this jobserver.") + JobserverCmd.Flags().Bool("noschedule", false, "Do not schedule jobs from this jobserver.") + + RootCmd.AddCommand(JobserverCmd) +} + +func jobserverCmdF(command *cobra.Command, args []string) error { + // Options + noJobs, _ := command.Flags().GetBool("nojobs") + noSchedule, _ := command.Flags().GetBool("noschedule") + + // Initialize + a, err := initDBCommandContext(getConfigDSN(command, config.GetEnvironment()), false, app.StartMetrics) + if err != nil { + return err + } + defer a.Srv().Shutdown() + + 
a.Srv().LoadLicense() + + rctx := request.EmptyContext(a.Log()) + + // Run jobs + rctx.Logger().Info("Starting Mattermost job server") + defer rctx.Logger().Info("Stopped Mattermost job server") + + if !noJobs { + a.Srv().Jobs.StartWorkers() + defer a.Srv().Jobs.StopWorkers() + } + if !noSchedule { + a.Srv().Jobs.StartSchedulers() + defer a.Srv().Jobs.StopSchedulers() + } + + if !noJobs || !noSchedule { + auditRec := a.MakeAuditRecord(rctx, model.AuditEventJobServer, model.AuditStatusSuccess) + a.LogAuditRec(rctx, auditRec, nil) + } + + signalChan := make(chan os.Signal, 1) + signal.Notify(signalChan, os.Interrupt, syscall.SIGINT, syscall.SIGTERM) + <-signalChan + + // Cleanup anything that isn't handled by a defer statement + rctx.Logger().Info("Stopping Mattermost job server") + + return nil +} diff --git a/cmd/mattermost/commands/main_test.go b/cmd/mattermost/commands/main_test.go new file mode 100644 index 00000000..90444e4c --- /dev/null +++ b/cmd/mattermost/commands/main_test.go @@ -0,0 +1,100 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "flag" + "os" + "testing" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/v8/channels/api4" + "github.com/mattermost/mattermost/server/v8/channels/testlib" +) + +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +type TestConfig struct { + TestServiceSettings TestServiceSettings + TestTeamSettings TestTeamSettings + TestClientRequirements TestClientRequirements + TestMessageExportSettings TestMessageExportSettings +} + +type TestMessageExportSettings struct { + Enableexport bool + Exportformat string + TestGlobalRelaySettings TestGlobalRelaySettings +} + +type TestGlobalRelaySettings struct { + Customertype string + Smtpusername string + Smtppassword string +} + +type TestServiceSettings struct { + Siteurl string + Websocketurl string + Licensedfieldlocation string +} + +type TestTeamSettings struct { + Sitename string + Maxuserperteam int +} + +type TestClientRequirements struct { + Androidlatestversion string + Androidminversion string + Desktoplatestversion string +} + +type TestNewConfig struct { + TestNewServiceSettings TestNewServiceSettings + TestNewTeamSettings TestNewTeamSettings +} + +type TestNewServiceSettings struct { + SiteUrl *string + UseLetsEncrypt *bool + TLSStrictTransportMaxAge *int64 + AllowedThemes []string +} + +type TestNewTeamSettings struct { + SiteName *string + MaxUserPerTeam *int +} + +type TestPluginSettings struct { + Enable *bool + Directory *string `restricted:"true"` + Plugins map[string]map[string]any + PluginStates map[string]*model.PluginState + SignaturePublicKeyFiles []string +} + +func TestMain(m *testing.M) { + // Command tests are run by re-invoking the test binary in question, so avoid creating + // another container when we detect same. 
+ flag.Parse() + if filter := flag.Lookup("test.run").Value.String(); filter == "ExecCommand" { + status := m.Run() + os.Exit(status) + return + } + + var options = testlib.HelperOptions{ + EnableStore: true, + EnableResources: true, + } + + mainHelper = testlib.NewMainHelperWithOptions(&options) + defer mainHelper.Close() + api4.SetMainHelper(mainHelper) + + mainHelper.Main(m) +} diff --git a/cmd/mattermost/commands/output.go b/cmd/mattermost/commands/output.go new file mode 100644 index 00000000..bf4584a7 --- /dev/null +++ b/cmd/mattermost/commands/output.go @@ -0,0 +1,21 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "fmt" + "os" +) + +func CommandPrintln(a ...any) (int, error) { + return fmt.Println(a...) +} + +func CommandPrintErrorln(a ...any) (int, error) { + return fmt.Fprintln(os.Stderr, a...) +} + +func CommandPrettyPrintln(a ...any) (int, error) { + return fmt.Fprintln(os.Stdout, a...) +} diff --git a/cmd/mattermost/commands/root.go b/cmd/mattermost/commands/root.go new file mode 100644 index 00000000..692ac30f --- /dev/null +++ b/cmd/mattermost/commands/root.go @@ -0,0 +1,38 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "os" + + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/spf13/cobra" +) + +type Command = cobra.Command + +func Run(args []string) error { + RootCmd.SetArgs(args) + return RootCmd.Execute() +} + +var RootCmd = &cobra.Command{ + Use: "mattermost", + Short: "Open source, self-hosted Slack-alternative", + Long: `Mattermost offers workplace messaging across web, PC and phones with archiving, search and integration with your existing systems. 
Documentation available at https://docs.mattermost.com`, + PersistentPreRun: func(cmd *cobra.Command, args []string) { + checkForRootUser() + }, +} + +func init() { + RootCmd.PersistentFlags().StringP("config", "c", "", "Configuration file to use.") +} + +// checkForRootUser logs a warning if the process is running as root +func checkForRootUser() { + if os.Geteuid() == 0 { + mlog.Warn("Running Mattermost as root is not recommended. Please use a non-root user.") + } +} diff --git a/cmd/mattermost/commands/server.go b/cmd/mattermost/commands/server.go new file mode 100644 index 00000000..76d33bfa --- /dev/null +++ b/cmd/mattermost/commands/server.go @@ -0,0 +1,151 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "bytes" + "net" + "os" + "os/signal" + "runtime/debug" + "runtime/pprof" + "syscall" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/mattermost/mattermost/server/v8/channels/api4" + "github.com/mattermost/mattermost/server/v8/channels/app" + "github.com/mattermost/mattermost/server/v8/channels/utils" + "github.com/mattermost/mattermost/server/v8/channels/web" + "github.com/mattermost/mattermost/server/v8/channels/wsapi" + "github.com/mattermost/mattermost/server/v8/config" +) + +var serverCmd = &cobra.Command{ + Use: "server", + Short: "Run the Mattermost server", + RunE: serverCmdF, + SilenceUsage: true, +} + +func init() { + RootCmd.AddCommand(serverCmd) + RootCmd.RunE = serverCmdF +} + +func serverCmdF(command *cobra.Command, args []string) error { + interruptChan := make(chan os.Signal, 1) + + if err := utils.TranslationsPreInit(); err != nil { + return errors.Wrap(err, "unable to load Mattermost translation files") + } + + customDefaults, err := loadCustomDefaults() + if err != nil { + mlog.Warn("Error loading custom configuration defaults: " + err.Error()) + } + + 
configStore, err := config.NewStoreFromDSN(getConfigDSN(command, config.GetEnvironment()), false, customDefaults, true) + if err != nil { + return errors.Wrap(err, "failed to load configuration") + } + defer configStore.Close() + + return runServer(configStore, interruptChan) +} + +func runServer(configStore *config.Store, interruptChan chan os.Signal) error { + // Setting the highest traceback level from the code. + // This is done to print goroutines from all threads (see golang.org/issue/13161) + // and also preserve a crash dump for later investigation. + debug.SetTraceback("crash") + + options := []app.Option{ + // The option order is important as app.Config option reads app.StartMetrics option. + app.StartMetrics, + app.ConfigStore(configStore), + app.RunEssentialJobs, + app.JoinCluster, + } + server, err := app.NewServer(options...) + if err != nil { + mlog.Error(err.Error()) + return err + } + defer server.Shutdown() + // We add this after shutdown so that it can be called + // before server shutdown happens as it can close + // the advanced logger and prevent the mlog call from working properly. + defer func() { + // A panic pass-through layer which just logs it + // and sends it upwards. + if x := recover(); x != nil { + var buf bytes.Buffer + pprof.Lookup("goroutine").WriteTo(&buf, 2) + mlog.Error("A panic occurred", + mlog.Any("error", x), + mlog.String("stack", buf.String())) + panic(x) + } + }() + + _, err = api4.Init(server) + if err != nil { + mlog.Error(err.Error()) + return err + } + + wsapi.Init(server) + web.New(server) + + err = server.Start() + if err != nil { + mlog.Error(err.Error()) + return err + } + + notifyReady() + + // Wiping off any signal handlers set before. + // This may come from intermediary signal handlers requiring to clean + // up resources before server.Start can finish. 
+ signal.Reset(syscall.SIGINT, syscall.SIGTERM) + // wait for kill signal before attempting to gracefully shutdown + // the running service + signal.Notify(interruptChan, syscall.SIGINT, syscall.SIGTERM) + <-interruptChan + + return nil +} + +func notifyReady() { + // If the environment vars provide a systemd notification socket, + // notify systemd that the server is ready. + systemdSocket := os.Getenv("NOTIFY_SOCKET") + if systemdSocket != "" { + mlog.Info("Sending systemd READY notification.") + + err := sendSystemdReadyNotification(systemdSocket) + if err != nil { + mlog.Error(err.Error()) + } + } +} + +func sendSystemdReadyNotification(socketPath string) error { + msg := "READY=1" + addr := &net.UnixAddr{ + Name: socketPath, + Net: "unixgram", + } + conn, err := net.DialUnix(addr.Net, nil, addr) + if err != nil { + return err + } + defer conn.Close() + _, err = conn.Write([]byte(msg)) + return err +} diff --git a/cmd/mattermost/commands/server_test.go b/cmd/mattermost/commands/server_test.go new file mode 100644 index 00000000..9a6a852e --- /dev/null +++ b/cmd/mattermost/commands/server_test.go @@ -0,0 +1,152 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "net" + "os" + "syscall" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/mattermost/mattermost/server/v8/channels/jobs" + "github.com/mattermost/mattermost/server/v8/config" +) + +const ( + unitTestListeningPort = "localhost:0" +) + +//nolint:golint,unused +type ServerTestHelper struct { + disableConfigWatch bool + interruptChan chan os.Signal + originalInterval int +} + +//nolint:golint,unused +func SetupServerTest(tb testing.TB) *ServerTestHelper { + if testing.Short() { + tb.SkipNow() + } + // Build a channel that will be used by the server to receive system signals... + interruptChan := make(chan os.Signal, 1) + // ...and sent it immediately a SIGINT value. 
+ // This will make the server loop stop as soon as it started successfully. + interruptChan <- syscall.SIGINT + + // Let jobs poll for termination every 0.2s (instead of every 15s by default) + // Otherwise we would have to wait the whole polling duration before the test + // terminates. + originalInterval := jobs.DefaultWatcherPollingInterval + jobs.DefaultWatcherPollingInterval = 200 + + th := &ServerTestHelper{ + disableConfigWatch: true, + interruptChan: interruptChan, + originalInterval: originalInterval, + } + return th +} + +//nolint:golint,unused +func (th *ServerTestHelper) TearDownServerTest() { + jobs.DefaultWatcherPollingInterval = th.originalInterval +} + +func TestRunServerSuccess(t *testing.T) { + th := SetupServerTest(t) + defer th.TearDownServerTest() + + configStore := config.NewTestMemoryStore() + + // Use non-default listening port in case another server instance is already running. + cfg := configStore.Get() + *cfg.ServiceSettings.ListenAddress = unitTestListeningPort + cfg.SqlSettings = *mainHelper.GetSQLSettings() + configStore.Set(cfg) + + err := runServer(configStore, th.interruptChan) + require.NoError(t, err) +} + +func TestRunServerSystemdNotification(t *testing.T) { + th := SetupServerTest(t) + defer th.TearDownServerTest() + + // Get a random temporary filename for using as a mock systemd socket + socketFile, err := os.CreateTemp("", "mattermost-systemd-mock-socket-") + if err != nil { + panic(err) + } + socketPath := socketFile.Name() + os.Remove(socketPath) + + // Set the socket path in the process environment + originalSocket := os.Getenv("NOTIFY_SOCKET") + os.Setenv("NOTIFY_SOCKET", socketPath) + defer os.Setenv("NOTIFY_SOCKET", originalSocket) + + // Open the socket connection + addr := &net.UnixAddr{ + Name: socketPath, + Net: "unixgram", + } + connection, err := net.ListenUnixgram("unixgram", addr) + if err != nil { + panic(err) + } + defer connection.Close() + defer os.Remove(socketPath) + + // Listen for socket data + 
socketReader := make(chan string) + go func(ch chan string) { + buffer := make([]byte, 512) + count, readErr := connection.Read(buffer) + if readErr != nil { + panic(readErr) + } + data := buffer[0:count] + ch <- string(data) + }(socketReader) + + configStore := config.NewTestMemoryStore() + + // Use non-default listening port in case another server instance is already running. + cfg := configStore.Get() + *cfg.ServiceSettings.ListenAddress = unitTestListeningPort + cfg.SqlSettings = *mainHelper.GetSQLSettings() + configStore.Set(cfg) + + // Start and stop the server + err = runServer(configStore, th.interruptChan) + require.NoError(t, err) + + // Ensure the notification has been sent on the socket and is correct + notification := <-socketReader + require.Equal(t, notification, "READY=1") +} + +func TestRunServerNoSystemd(t *testing.T) { + th := SetupServerTest(t) + defer th.TearDownServerTest() + + // Temporarily remove any Systemd socket defined in the environment + originalSocket := os.Getenv("NOTIFY_SOCKET") + os.Unsetenv("NOTIFY_SOCKET") + defer os.Setenv("NOTIFY_SOCKET", originalSocket) + + configStore := config.NewTestMemoryStore() + + // Use non-default listening port in case another server instance is already running. + cfg := configStore.Get() + *cfg.ServiceSettings.ListenAddress = unitTestListeningPort + cfg.SqlSettings = *mainHelper.GetSQLSettings() + configStore.Set(cfg) + + err := runServer(configStore, th.interruptChan) + require.NoError(t, err) +} diff --git a/cmd/mattermost/commands/test.go b/cmd/mattermost/commands/test.go new file mode 100644 index 00000000..cca8d5bd --- /dev/null +++ b/cmd/mattermost/commands/test.go @@ -0,0 +1,154 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package commands + +import ( + "bufio" + "fmt" + "os" + "os/exec" + "os/signal" + "syscall" + + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/i18n" + "github.com/mattermost/mattermost/server/v8/channels/api4" + "github.com/mattermost/mattermost/server/v8/channels/app" + "github.com/mattermost/mattermost/server/v8/channels/wsapi" +) + +var TestCmd = &cobra.Command{ + Use: "test", + Short: "Testing Commands", + Hidden: true, +} + +var RunWebClientTestsCmd = &cobra.Command{ + Use: "web_client_tests", + Short: "Run the web client tests", + RunE: webClientTestsCmdF, +} + +var RunServerForWebClientTestsCmd = &cobra.Command{ + Use: "web_client_tests_server", + Short: "Run the server configured for running the web client tests against it", + RunE: serverForWebClientTestsCmdF, +} + +func init() { + TestCmd.AddCommand( + RunWebClientTestsCmd, + RunServerForWebClientTestsCmd, + ) + RootCmd.AddCommand(TestCmd) +} + +func webClientTestsCmdF(command *cobra.Command, args []string) error { + a, err := InitDBCommandContextCobra(command, app.StartMetrics) + if err != nil { + return err + } + defer a.Srv().Shutdown() + + i18n.InitTranslations(*a.Config().LocalizationSettings.DefaultServerLocale, *a.Config().LocalizationSettings.DefaultClientLocale) + serverErr := a.Srv().Start() + if serverErr != nil { + return serverErr + } + + _, err = api4.Init(a.Srv()) + if err != nil { + return err + } + wsapi.Init(a.Srv()) + a.UpdateConfig(setupClientTests) + runWebClientTests() + + return nil +} + +func serverForWebClientTestsCmdF(command *cobra.Command, args []string) error { + a, err := InitDBCommandContextCobra(command, app.StartMetrics) + if err != nil { + return err + } + defer a.Srv().Shutdown() + + i18n.InitTranslations(*a.Config().LocalizationSettings.DefaultServerLocale, *a.Config().LocalizationSettings.DefaultClientLocale) + serverErr := a.Srv().Start() + if serverErr != nil { + return 
serverErr + } + + _, err = api4.Init(a.Srv()) + if err != nil { + return err + } + wsapi.Init(a.Srv()) + a.UpdateConfig(setupClientTests) + + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt, syscall.SIGINT, syscall.SIGTERM) + <-c + + return nil +} + +func setupClientTests(cfg *model.Config) { + *cfg.TeamSettings.EnableOpenServer = true + *cfg.ServiceSettings.EnableCommands = false + *cfg.ServiceSettings.EnableCustomEmoji = true + *cfg.ServiceSettings.EnableIncomingWebhooks = false + *cfg.ServiceSettings.EnableOutgoingWebhooks = false + *cfg.ServiceSettings.EnableOutgoingOAuthConnections = false +} + +func executeTestCommand(command *exec.Cmd) { + cmdOutPipe, err := command.StdoutPipe() + if err != nil { + CommandPrintErrorln("Failed to run tests") + os.Exit(1) + return + } + + cmdErrOutPipe, err := command.StderrPipe() + if err != nil { + CommandPrintErrorln("Failed to run tests") + os.Exit(1) + return + } + + cmdOutReader := bufio.NewScanner(cmdOutPipe) + cmdErrOutReader := bufio.NewScanner(cmdErrOutPipe) + go func() { + for cmdOutReader.Scan() { + fmt.Println(cmdOutReader.Text()) + } + }() + + go func() { + for cmdErrOutReader.Scan() { + fmt.Println(cmdErrOutReader.Text()) + } + }() + + if err := command.Run(); err != nil { + CommandPrintErrorln("Client Tests failed") + os.Exit(1) + return + } +} + +func runWebClientTests() { + if webappDir := os.Getenv("WEBAPP_DIR"); webappDir != "" { + os.Chdir(webappDir) + } else { + os.Chdir("../mattermost-webapp") + } + + cmd := exec.Command("npm", "test") + executeTestCommand(cmd) +} diff --git a/cmd/mattermost/commands/utils.go b/cmd/mattermost/commands/utils.go new file mode 100644 index 00000000..163aab81 --- /dev/null +++ b/cmd/mattermost/commands/utils.go @@ -0,0 +1,91 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package commands + +import ( + "bytes" + "encoding/json" + "fmt" + "os" + "reflect" + "sort" + "strings" + + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/model" +) + +const CustomDefaultsEnvVar = "MM_CUSTOM_DEFAULTS_PATH" + +// printStringMap takes a reflect.Value and prints it out alphabetically based on key values, which must be strings. +// This is done recursively if it's a map, and uses the given tab settings. +func printStringMap(value reflect.Value, tabVal int) string { + out := &bytes.Buffer{} + + var sortedKeys []string + stringToKeyMap := make(map[string]reflect.Value) + for _, k := range value.MapKeys() { + sortedKeys = append(sortedKeys, k.String()) + stringToKeyMap[k.String()] = k + } + + sort.Strings(sortedKeys) + + for _, keyString := range sortedKeys { + key := stringToKeyMap[keyString] + val := value.MapIndex(key) + if newVal, ok := val.Interface().(map[string]any); !ok { + fmt.Fprintf(out, "%s", strings.Repeat("\t", tabVal)) + fmt.Fprintf(out, "%v: \"%v\"\n", key.Interface(), val.Interface()) + } else { + fmt.Fprintf(out, "%s", strings.Repeat("\t", tabVal)) + fmt.Fprintf(out, "%v:\n", key.Interface()) + // going one level in, increase the tab + tabVal++ + fmt.Fprintf(out, "%s", printStringMap(reflect.ValueOf(newVal), tabVal)) + // coming back one level, decrease the tab + tabVal-- + } + } + + return out.String() +} + +func getConfigDSN(command *cobra.Command, env map[string]string) string { + configDSN, _ := command.Flags().GetString("config") + + // Config not supplied in flag, check env + if configDSN == "" { + configDSN = env["MM_CONFIG"] + } + + // Config not supplied in env or flag use default + if configDSN == "" { + configDSN = "config.json" + } + + return configDSN +} + +func loadCustomDefaults() (*model.Config, error) { + customDefaultsPath := os.Getenv(CustomDefaultsEnvVar) + if customDefaultsPath == "" { + return nil, nil + } + + file, err := os.Open(customDefaultsPath) + if err != nil { + return nil, 
fmt.Errorf("unable to open custom defaults file at %q: %w", customDefaultsPath, err) + } + defer file.Close() + + var customDefaults *model.Config + err = json.NewDecoder(file).Decode(&customDefaults) + if err != nil { + return nil, fmt.Errorf("unable to decode custom defaults configuration: %w", err) + } + + return customDefaults, nil +} diff --git a/cmd/mattermost/commands/utils_test.go b/cmd/mattermost/commands/utils_test.go new file mode 100644 index 00000000..a3c0318d --- /dev/null +++ b/cmd/mattermost/commands/utils_test.go @@ -0,0 +1,72 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "reflect" + "sort" + "strings" + "testing" +) + +func TestPrintMap(t *testing.T) { + inputCases := []any{ + map[string]any{ + "CustomerType": "A9", + "SmtpUsername": "", + "SmtpPassword": "", + "EmailAddress": "", + }, + map[string]any{ + "EnableExport": false, + "ExportFormat": "actiance", + "DailyRunTime": "01:00", + "GlobalRelaySettings": map[string]any{ + "CustomerType": "A9", + "SmtpUsername": "", + "SmtpPassword": "", + "EmailAddress": "", + }, + }, + } + + outputCases := []string{ + "CustomerType: \"A9\"\nSmtpUsername: \"\"\nSmtpPassword: \"\"\nEmailAddress: \"\"\n", + "EnableExport: \"false\"\nExportFormat: \"actiance\"\nDailyRunTime: \"01:00\"\nGlobalRelaySettings:\n\t CustomerType: \"A9\"\n\tSmtpUsername: \"\"\n\tSmtpPassword: \"\"\n\tEmailAddress: \"\"\n", + } + + cases := []struct { + Name string + Input reflect.Value + Expected string + }{ + { + Name: "Basic print", + Input: reflect.ValueOf(inputCases[0]), + Expected: outputCases[0], + }, + { + Name: "Complex print", + Input: reflect.ValueOf(inputCases[1]), + Expected: outputCases[1], + }, + } + + for _, test := range cases { + t.Run(test.Name, func(t *testing.T) { + res := printStringMap(test.Input, 0) + + // create two slice of string formed by splitting our strings on \n + slice1 := strings.Split(res, "\n") + 
slice2 := strings.Split(test.Expected, "\n") + + sort.Strings(slice1) + sort.Strings(slice2) + + if !reflect.DeepEqual(slice1, slice2) { + t.Errorf("got '%#v' want '%#v", slice1, slice2) + } + }) + } +} diff --git a/cmd/mattermost/commands/version.go b/cmd/mattermost/commands/version.go new file mode 100644 index 00000000..4ad25b42 --- /dev/null +++ b/cmd/mattermost/commands/version.go @@ -0,0 +1,30 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/mattermost/mattermost/server/public/model" +) + +var VersionCmd = &cobra.Command{ + Use: "version", + Short: "Display version information", + RunE: versionCmdF, +} + +func init() { + RootCmd.AddCommand(VersionCmd) +} + +func versionCmdF(command *cobra.Command, args []string) error { + CommandPrintln("Version: " + model.CurrentVersion) + CommandPrintln("Build Number: " + model.BuildNumber) + CommandPrintln("Build Date: " + model.BuildDate) + CommandPrintln("Build Hash: " + model.BuildHash) + CommandPrintln("Build Enterprise Ready: " + model.BuildEnterpriseReady) + + return nil +} diff --git a/cmd/mattermost/commands/version_test.go b/cmd/mattermost/commands/version_test.go new file mode 100644 index 00000000..57e373bf --- /dev/null +++ b/cmd/mattermost/commands/version_test.go @@ -0,0 +1,19 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package commands + +import ( + "testing" +) + +func TestVersion(t *testing.T) { + if testing.Short() { + t.Skip("skipping version test in short mode") + } + + th := SetupWithStoreMock(t) + defer th.TearDown() + + th.CheckCommand(t, "version") +} diff --git a/cmd/mattermost/main.go b/cmd/mattermost/main.go new file mode 100644 index 00000000..5fb4b4a0 --- /dev/null +++ b/cmd/mattermost/main.go @@ -0,0 +1,23 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. 
+// See LICENSE.txt for license information. + +package main + +import ( + "os" + + "github.com/mattermost/mattermost/server/v8/cmd/mattermost/commands" + // Import and register app layer slash commands + _ "github.com/mattermost/mattermost/server/v8/channels/app/slashcommands" + // Plugins + _ "github.com/mattermost/mattermost/server/v8/channels/app/oauthproviders/gitlab" + + // Enterprise Imports + _ "github.com/mattermost/mattermost/server/v8/enterprise" +) + +func main() { + if err := commands.Run(os.Args[1:]); err != nil { + os.Exit(1) + } +} diff --git a/cmd/mattermost/main_test.go b/cmd/mattermost/main_test.go new file mode 100644 index 00000000..e6fa87e1 --- /dev/null +++ b/cmd/mattermost/main_test.go @@ -0,0 +1,19 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +//go:build maincoverage + +package main + +import ( + "testing" +) + +// TestRunMain can be used to track code coverage in integration tests. +// To run this: +// go test -coverpkg="<>" -ldflags '<>' -tags maincoverage -c ./cmd/mattermost/ +// ./mattermost.test -test.run="^TestRunMain$" -test.coverprofile=coverage.out +// And then run your integration tests. 
+func TestRunMain(t *testing.T) { + main() +} diff --git a/vendor/github.com/mattermost/go-i18n/LICENSE b/vendor/github.com/mattermost/go-i18n/LICENSE new file mode 100644 index 00000000..609cce79 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2014 Nick Snyder https://github.com/nicksnyder + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/mattermost/go-i18n/i18n/bundle/bundle.go b/vendor/github.com/mattermost/go-i18n/i18n/bundle/bundle.go new file mode 100644 index 00000000..9c572c07 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/bundle/bundle.go @@ -0,0 +1,453 @@ +// Package bundle manages translations for multiple languages. 
+package bundle + +import ( + "bytes" + "encoding/json" + "fmt" + "io/ioutil" + "path/filepath" + "reflect" + "sync" + "unicode" + + "github.com/mattermost/go-i18n/i18n/language" + "github.com/mattermost/go-i18n/i18n/translation" + toml "github.com/pelletier/go-toml" + "gopkg.in/yaml.v2" +) + +// TranslateFunc is a copy of i18n.TranslateFunc to avoid a circular dependency. +type TranslateFunc func(translationID string, args ...interface{}) string + +// Bundle stores the translations for multiple languages. +type Bundle struct { + // The primary translations for a language tag and translation id. + translations map[string]map[string]translation.Translation + + // Translations that can be used when an exact language match is not possible. + fallbackTranslations map[string]map[string]translation.Translation + + sync.RWMutex +} + +// New returns an empty bundle. +func New() *Bundle { + return &Bundle{ + translations: make(map[string]map[string]translation.Translation), + fallbackTranslations: make(map[string]map[string]translation.Translation), + } +} + +// MustLoadTranslationFile is similar to LoadTranslationFile +// except it panics if an error happens. +func (b *Bundle) MustLoadTranslationFile(filename string) { + if err := b.LoadTranslationFile(filename); err != nil { + panic(err) + } +} + +// LoadTranslationFile loads the translations from filename into memory. +// +// The language that the translations are associated with is parsed from the filename (e.g. en-US.json). +// +// Generally you should load translation files once during your program's initialization. +func (b *Bundle) LoadTranslationFile(filename string) error { + buf, err := ioutil.ReadFile(filename) + if err != nil { + return err + } + return b.ParseTranslationFileBytes(filename, buf) +} + +// ParseTranslationFileBytes is similar to LoadTranslationFile except it parses the bytes in buf. +// +// It is useful for parsing translation files embedded with go-bindata. 
+func (b *Bundle) ParseTranslationFileBytes(filename string, buf []byte) error { + basename := filepath.Base(filename) + langs := language.Parse(basename) + switch l := len(langs); { + case l == 0: + return fmt.Errorf("no language found in %q", basename) + case l > 1: + return fmt.Errorf("multiple languages found in filename %q: %v; expected one", basename, langs) + } + translations, err := parseTranslations(filename, buf) + if err != nil { + return err + } + b.AddTranslation(langs[0], translations...) + return nil +} + +func parseTranslations(filename string, buf []byte) ([]translation.Translation, error) { + if len(buf) == 0 { + return []translation.Translation{}, nil + } + + ext := filepath.Ext(filename) + + // `github.com/pelletier/go-toml` lacks an Unmarshal function, + // so we should parse TOML separately. + if ext == ".toml" { + tree, err := toml.LoadReader(bytes.NewReader(buf)) + if err != nil { + return nil, err + } + + m := make(map[string]map[string]interface{}) + for k, v := range tree.ToMap() { + m[k] = v.(map[string]interface{}) + } + + return parseFlatFormat(m) + } + + // Then parse other formats. + if isStandardFormat(ext, buf) { + var standardFormat []map[string]interface{} + if err := unmarshal(ext, buf, &standardFormat); err != nil { + return nil, fmt.Errorf("failed to unmarshal %v: %v", filename, err) + } + return parseStandardFormat(standardFormat) + } + var flatFormat map[string]map[string]interface{} + if err := unmarshal(ext, buf, &flatFormat); err != nil { + return nil, fmt.Errorf("failed to unmarshal %v: %v", filename, err) + } + return parseFlatFormat(flatFormat) +} + +func isStandardFormat(ext string, buf []byte) bool { + buf = deleteLeadingComments(ext, buf) + firstRune := rune(buf[0]) + return (ext == ".json" && firstRune == '[') || (ext == ".yaml" && firstRune == '-') +} + +// deleteLeadingComments deletes leading newlines and comments in buf. +// It only works for ext == ".yaml". 
+func deleteLeadingComments(ext string, buf []byte) []byte { + if ext != ".yaml" { + return buf + } + + for { + buf = bytes.TrimLeftFunc(buf, unicode.IsSpace) + if buf[0] == '#' { + buf = deleteLine(buf) + } else { + break + } + } + + return buf +} + +func deleteLine(buf []byte) []byte { + index := bytes.IndexRune(buf, '\n') + if index == -1 { // If there is only one line without newline ... + return nil // ... delete it and return nothing. + } + if index == len(buf)-1 { // If there is only one line with newline ... + return nil // ... do the same as above. + } + return buf[index+1:] +} + +// unmarshal finds an appropriate unmarshal function for ext +// (extension of filename) and unmarshals buf to out. out must be a pointer. +func unmarshal(ext string, buf []byte, out interface{}) error { + switch ext { + case ".json": + return json.Unmarshal(buf, out) + case ".yaml": + return yaml.Unmarshal(buf, out) + } + + return fmt.Errorf("unsupported file extension %v", ext) +} + +func parseStandardFormat(data []map[string]interface{}) ([]translation.Translation, error) { + translations := make([]translation.Translation, 0, len(data)) + for i, translationData := range data { + t, err := translation.NewTranslation(translationData) + if err != nil { + return nil, fmt.Errorf("unable to parse translation #%d because %s\n%v", i, err, translationData) + } + translations = append(translations, t) + } + return translations, nil +} + +// parseFlatFormat just converts data from flat format to standard format +// and passes it to parseStandardFormat. +// +// Flat format logic: +// key of data must be a string and data[key] must be always map[string]interface{}, +// but if there is only "other" key in it then it is non-plural, else plural. 
+func parseFlatFormat(data map[string]map[string]interface{}) ([]translation.Translation, error) { + var standardFormatData []map[string]interface{} + for id, translationData := range data { + dataObject := make(map[string]interface{}) + dataObject["id"] = id + if len(translationData) == 1 { // non-plural form + _, otherExists := translationData["other"] + if otherExists { + dataObject["translation"] = translationData["other"] + } + } else { // plural form + dataObject["translation"] = translationData + } + + standardFormatData = append(standardFormatData, dataObject) + } + + return parseStandardFormat(standardFormatData) +} + +// AddTranslation adds translations for a language. +// +// It is useful if your translations are in a format not supported by LoadTranslationFile. +func (b *Bundle) AddTranslation(lang *language.Language, translations ...translation.Translation) { + b.Lock() + defer b.Unlock() + if b.translations[lang.Tag] == nil { + b.translations[lang.Tag] = make(map[string]translation.Translation, len(translations)) + } + currentTranslations := b.translations[lang.Tag] + for _, newTranslation := range translations { + if currentTranslation := currentTranslations[newTranslation.ID()]; currentTranslation != nil { + currentTranslations[newTranslation.ID()] = currentTranslation.Merge(newTranslation) + } else { + currentTranslations[newTranslation.ID()] = newTranslation + } + } + + // lang can provide translations for less specific language tags. + for _, tag := range lang.MatchingTags() { + b.fallbackTranslations[tag] = currentTranslations + } +} + +// Translations returns all translations in the bundle. 
+func (b *Bundle) Translations() map[string]map[string]translation.Translation { + t := make(map[string]map[string]translation.Translation) + b.RLock() + for tag, translations := range b.translations { + t[tag] = make(map[string]translation.Translation) + for id, translation := range translations { + t[tag][id] = translation + } + } + b.RUnlock() + return t +} + +// LanguageTags returns the tags of all languages that that have been added. +func (b *Bundle) LanguageTags() []string { + var tags []string + b.RLock() + for k := range b.translations { + tags = append(tags, k) + } + b.RUnlock() + return tags +} + +// LanguageTranslationIDs returns the ids of all translations that have been added for a given language. +func (b *Bundle) LanguageTranslationIDs(languageTag string) []string { + var ids []string + b.RLock() + for id := range b.translations[languageTag] { + ids = append(ids, id) + } + b.RUnlock() + return ids +} + +// MustTfunc is similar to Tfunc except it panics if an error happens. +func (b *Bundle) MustTfunc(pref string, prefs ...string) TranslateFunc { + tfunc, err := b.Tfunc(pref, prefs...) + if err != nil { + panic(err) + } + return tfunc +} + +// MustTfuncAndLanguage is similar to TfuncAndLanguage except it panics if an error happens. +func (b *Bundle) MustTfuncAndLanguage(pref string, prefs ...string) (TranslateFunc, *language.Language) { + tfunc, language, err := b.TfuncAndLanguage(pref, prefs...) + if err != nil { + panic(err) + } + return tfunc, language +} + +// Tfunc is similar to TfuncAndLanguage except is doesn't return the Language. +func (b *Bundle) Tfunc(pref string, prefs ...string) (TranslateFunc, error) { + tfunc, _, err := b.TfuncAndLanguage(pref, prefs...) + return tfunc, err +} + +// TfuncAndLanguage returns a TranslateFunc for the first Language that +// has a non-zero number of translations in the bundle. 
+// +// The returned Language matches the the first language preference that could be satisfied, +// but this may not strictly match the language of the translations used to satisfy that preference. +// +// For example, the user may request "zh". If there are no translations for "zh" but there are translations +// for "zh-cn", then the translations for "zh-cn" will be used but the returned Language will be "zh". +// +// It can parse languages from Accept-Language headers (RFC 2616), +// but it assumes weights are monotonically decreasing. +func (b *Bundle) TfuncAndLanguage(pref string, prefs ...string) (TranslateFunc, *language.Language, error) { + lang := b.supportedLanguage(pref, prefs...) + var err error + if lang == nil { + err = fmt.Errorf("no supported languages found %#v", append(prefs, pref)) + } + return func(translationID string, args ...interface{}) string { + return b.translate(lang, translationID, args...) + }, lang, err +} + +// supportedLanguage returns the first language which +// has a non-zero number of translations in the bundle. 
+func (b *Bundle) supportedLanguage(pref string, prefs ...string) *language.Language { + lang := b.translatedLanguage(pref) + if lang == nil { + for _, pref := range prefs { + lang = b.translatedLanguage(pref) + if lang != nil { + break + } + } + } + return lang +} + +func (b *Bundle) translatedLanguage(src string) *language.Language { + langs := language.Parse(src) + b.RLock() + defer b.RUnlock() + for _, lang := range langs { + if len(b.translations[lang.Tag]) > 0 || + len(b.fallbackTranslations[lang.Tag]) > 0 { + return lang + } + } + return nil +} + +func (b *Bundle) translate(lang *language.Language, translationID string, args ...interface{}) string { + if lang == nil { + return translationID + } + + translation := b.translation(lang, translationID) + if translation == nil { + return translationID + } + + var data interface{} + var count interface{} + if argc := len(args); argc > 0 { + if isNumber(args[0]) { + count = args[0] + if argc > 1 { + data = args[1] + } + } else { + data = args[0] + } + } + + if count != nil { + if data == nil { + data = map[string]interface{}{"Count": count} + } else { + dataMap := toMap(data) + dataMap["Count"] = count + data = dataMap + } + } else { + dataMap := toMap(data) + if c, ok := dataMap["Count"]; ok { + count = c + } + } + + p, _ := lang.Plural(count) + template := translation.Template(p) + if template == nil { + if p == language.Other { + return translationID + } + countInt, ok := count.(int) + if ok && countInt > 1 { + template = translation.Template(language.Other) + } + } + + if template == nil { + return translationID + } + + s := template.Execute(data) + if s == "" { + return translationID + } + return s +} + +func (b *Bundle) translation(lang *language.Language, translationID string) translation.Translation { + b.RLock() + defer b.RUnlock() + translations := b.translations[lang.Tag] + if translations == nil { + translations = b.fallbackTranslations[lang.Tag] + if translations == nil { + return nil + } + } + return 
translations[translationID] +} + +func isNumber(n interface{}) bool { + switch n.(type) { + case int, int8, int16, int32, int64, string: + return true + } + return false +} + +func toMap(input interface{}) map[string]interface{} { + if data, ok := input.(map[string]interface{}); ok { + return data + } + v := reflect.ValueOf(input) + switch v.Kind() { + case reflect.Ptr: + return toMap(v.Elem().Interface()) + case reflect.Struct: + return structToMap(v) + default: + return nil + } +} + +// Converts the top level of a struct to a map[string]interface{}. +// Code inspired by github.com/fatih/structs. +func structToMap(v reflect.Value) map[string]interface{} { + out := make(map[string]interface{}) + t := v.Type() + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + if field.PkgPath != "" { + // unexported field. skip. + continue + } + out[field.Name] = v.FieldByName(field.Name).Interface() + } + return out +} diff --git a/vendor/github.com/mattermost/go-i18n/i18n/i18n.go b/vendor/github.com/mattermost/go-i18n/i18n/i18n.go new file mode 100644 index 00000000..4c95b996 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/i18n.go @@ -0,0 +1,158 @@ +// Package i18n supports string translations with variable substitution and CLDR pluralization. +// It is intended to be used in conjunction with the goi18n command, although that is not strictly required. +// +// Initialization +// +// Your Go program should load translations during its initialization. +// i18n.MustLoadTranslationFile("path/to/fr-FR.all.json") +// If your translations are in a file format not supported by (Must)?LoadTranslationFile, +// then you can use the AddTranslation function to manually add translations. +// +// Fetching a translation +// +// Use Tfunc or MustTfunc to fetch a TranslateFunc that will return the translated string for a specific language. 
+// func handleRequest(w http.ResponseWriter, r *http.Request) { +// cookieLang := r.Cookie("lang") +// acceptLang := r.Header.Get("Accept-Language") +// defaultLang = "en-US" // known valid language +// T, err := i18n.Tfunc(cookieLang, acceptLang, defaultLang) +// fmt.Println(T("Hello world")) +// } +// +// Usually it is a good idea to identify strings by a generic id rather than the English translation, +// but the rest of this documentation will continue to use the English translation for readability. +// T("Hello world") // ok +// T("programGreeting") // better! +// +// Variables +// +// TranslateFunc supports strings that have variables using the text/template syntax. +// T("Hello {{.Person}}", map[string]interface{}{ +// "Person": "Bob", +// }) +// +// Pluralization +// +// TranslateFunc supports the pluralization of strings using the CLDR pluralization rules defined here: +// http://www.unicode.org/cldr/charts/latest/supplemental/language_plural_rules.html +// T("You have {{.Count}} unread emails.", 2) +// T("I am {{.Count}} meters tall.", "1.7") +// +// Plural strings may also have variables. +// T("{{.Person}} has {{.Count}} unread emails", 2, map[string]interface{}{ +// "Person": "Bob", +// }) +// +// Sentences with multiple plural components can be supported with nesting. +// T("{{.Person}} has {{.Count}} unread emails in the past {{.Timeframe}}.", 3, map[string]interface{}{ +// "Person": "Bob", +// "Timeframe": T("{{.Count}} days", 2), +// }) +// +// Templates +// +// You can use the .Funcs() method of a text/template or html/template to register a TranslateFunc +// for usage inside of that template. +package i18n + +import ( + "github.com/mattermost/go-i18n/i18n/bundle" + "github.com/mattermost/go-i18n/i18n/language" + "github.com/mattermost/go-i18n/i18n/translation" +) + +// TranslateFunc returns the translation of the string identified by translationID. +// +// If there is no translation for translationID, then the translationID itself is returned. 
+// This makes it easy to identify missing translations in your app. +// +// If translationID is a non-plural form, then the first variadic argument may be a map[string]interface{} +// or struct that contains template data. +// +// If translationID is a plural form, the function accepts two parameter signatures +// 1. T(count int, data struct{}) +// The first variadic argument must be an integer type +// (int, int8, int16, int32, int64) or a float formatted as a string (e.g. "123.45"). +// The second variadic argument may be a map[string]interface{} or struct{} that contains template data. +// 2. T(data struct{}) +// data must be a struct{} or map[string]interface{} that contains a Count field and the template data, +// Count field must be an integer type (int, int8, int16, int32, int64) +// or a float formatted as a string (e.g. "123.45"). +type TranslateFunc func(translationID string, args ...interface{}) string + +// IdentityTfunc returns a TranslateFunc that always returns the translationID passed to it. +// +// It is a useful placeholder when parsing a text/template or html/template +// before the actual Tfunc is available. +func IdentityTfunc() TranslateFunc { + return func(translationID string, args ...interface{}) string { + return translationID + } +} + +var defaultBundle = bundle.New() + +// MustLoadTranslationFile is similar to LoadTranslationFile +// except it panics if an error happens. +func MustLoadTranslationFile(filename string) { + defaultBundle.MustLoadTranslationFile(filename) +} + +// LoadTranslationFile loads the translations from filename into memory. +// +// The language that the translations are associated with is parsed from the filename (e.g. en-US.json). +// +// Generally you should load translation files once during your program's initialization. 
+func LoadTranslationFile(filename string) error { + return defaultBundle.LoadTranslationFile(filename) +} + +// ParseTranslationFileBytes is similar to LoadTranslationFile except it parses the bytes in buf. +// +// It is useful for parsing translation files embedded with go-bindata. +func ParseTranslationFileBytes(filename string, buf []byte) error { + return defaultBundle.ParseTranslationFileBytes(filename, buf) +} + +// AddTranslation adds translations for a language. +// +// It is useful if your translations are in a format not supported by LoadTranslationFile. +func AddTranslation(lang *language.Language, translations ...translation.Translation) { + defaultBundle.AddTranslation(lang, translations...) +} + +// LanguageTags returns the tags of all languages that have been added. +func LanguageTags() []string { + return defaultBundle.LanguageTags() +} + +// LanguageTranslationIDs returns the ids of all translations that have been added for a given language. +func LanguageTranslationIDs(languageTag string) []string { + return defaultBundle.LanguageTranslationIDs(languageTag) +} + +// MustTfunc is similar to Tfunc except it panics if an error happens. +func MustTfunc(languageSource string, languageSources ...string) TranslateFunc { + return TranslateFunc(defaultBundle.MustTfunc(languageSource, languageSources...)) +} + +// Tfunc returns a TranslateFunc that will be bound to the first language which +// has a non-zero number of translations. +// +// It can parse languages from Accept-Language headers (RFC 2616). +func Tfunc(languageSource string, languageSources ...string) (TranslateFunc, error) { + tfunc, err := defaultBundle.Tfunc(languageSource, languageSources...) + return TranslateFunc(tfunc), err +} + +// MustTfuncAndLanguage is similar to TfuncAndLanguage except it panics if an error happens. 
+func MustTfuncAndLanguage(languageSource string, languageSources ...string) (TranslateFunc, *language.Language) { + tfunc, lang := defaultBundle.MustTfuncAndLanguage(languageSource, languageSources...) + return TranslateFunc(tfunc), lang +} + +// TfuncAndLanguage is similar to Tfunc except it also returns the language which TranslateFunc is bound to. +func TfuncAndLanguage(languageSource string, languageSources ...string) (TranslateFunc, *language.Language, error) { + tfunc, lang, err := defaultBundle.TfuncAndLanguage(languageSource, languageSources...) + return TranslateFunc(tfunc), lang, err +} diff --git a/vendor/github.com/mattermost/go-i18n/i18n/language/language.go b/vendor/github.com/mattermost/go-i18n/i18n/language/language.go new file mode 100644 index 00000000..b045a275 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/language/language.go @@ -0,0 +1,99 @@ +// Package language defines languages that implement CLDR pluralization. +package language + +import ( + "fmt" + "strings" +) + +// Language is a written human language. +type Language struct { + // Tag uniquely identifies the language as defined by RFC 5646. + // + // Most language tags are a two character language code (ISO 639-1) + // optionally followed by a dash and a two character country code (ISO 3166-1). + // (e.g. en, pt-br) + Tag string + *PluralSpec +} + +func (l *Language) String() string { + return l.Tag +} + +// MatchingTags returns the set of language tags that map to this Language. +// e.g. "zh-hans-cn" yields {"zh", "zh-hans", "zh-hans-cn"} +// BUG: This should be computed once and stored as a field on Language for efficiency, +// but this would require changing how Languages are constructed. 
+func (l *Language) MatchingTags() []string {
+	parts := strings.Split(l.Tag, "-")
+	var prefix, matches []string
+	for _, part := range parts {
+		prefix = append(prefix, part)
+		match := strings.Join(prefix, "-")
+		matches = append(matches, match)
+	}
+	return matches
+}
+
+// Parse returns a slice of supported languages found in src or nil if none are found.
+// It can parse language tags and Accept-Language headers.
+func Parse(src string) []*Language {
+	var langs []*Language
+	start := 0
+	for end, chr := range src {
+		switch chr {
+		case ',', ';', '.':
+			tag := strings.TrimSpace(src[start:end])
+			if spec := GetPluralSpec(tag); spec != nil {
+				langs = append(langs, &Language{NormalizeTag(tag), spec})
+			}
+			start = end + 1
+		}
+	}
+	if start > 0 {
+		tag := strings.TrimSpace(src[start:])
+		if spec := GetPluralSpec(tag); spec != nil {
+			langs = append(langs, &Language{NormalizeTag(tag), spec})
+		}
+		return dedupe(langs)
+	}
+	if spec := GetPluralSpec(src); spec != nil {
+		langs = append(langs, &Language{NormalizeTag(src), spec})
+	}
+	return langs
+}
+
+func dedupe(langs []*Language) []*Language {
+	found := make(map[string]struct{}, len(langs))
+	deduped := make([]*Language, 0, len(langs))
+	for _, lang := range langs {
+		if _, ok := found[lang.Tag]; !ok {
+			found[lang.Tag] = struct{}{}
+			deduped = append(deduped, lang)
+		}
+	}
+	return deduped
+}
+
+// MustParse is similar to Parse except it panics instead of returning a nil Language.
+func MustParse(src string) []*Language {
+	langs := Parse(src)
+	if len(langs) == 0 {
+		panic(fmt.Errorf("unable to parse language from %q", src))
+	}
+	return langs
+}
+
+// Add adds support for a new language.
+func Add(l *Language) { + tag := NormalizeTag(l.Tag) + pluralSpecs[tag] = l.PluralSpec +} + +// NormalizeTag returns a language tag with all lower-case characters +// and dashes "-" instead of underscores "_" +func NormalizeTag(tag string) string { + tag = strings.ToLower(tag) + return strings.Replace(tag, "_", "-", -1) +} diff --git a/vendor/github.com/mattermost/go-i18n/i18n/language/operands.go b/vendor/github.com/mattermost/go-i18n/i18n/language/operands.go new file mode 100644 index 00000000..c0bee68d --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/language/operands.go @@ -0,0 +1,119 @@ +package language + +import ( + "fmt" + "strconv" + "strings" +) + +// Operands is a representation of http://unicode.org/reports/tr35/tr35-numbers.html#Operands +type Operands struct { + N float64 // absolute value of the source number (integer and decimals) + I int64 // integer digits of n + V int64 // number of visible fraction digits in n, with trailing zeros + W int64 // number of visible fraction digits in n, without trailing zeros + F int64 // visible fractional digits in n, with trailing zeros + T int64 // visible fractional digits in n, without trailing zeros +} + +// NequalsAny returns true if o represents an integer equal to any of the arguments. +func (o *Operands) NequalsAny(any ...int64) bool { + for _, i := range any { + if o.I == i && o.T == 0 { + return true + } + } + return false +} + +// NmodEqualsAny returns true if o represents an integer equal to any of the arguments modulo mod. +func (o *Operands) NmodEqualsAny(mod int64, any ...int64) bool { + modI := o.I % mod + for _, i := range any { + if modI == i && o.T == 0 { + return true + } + } + return false +} + +// NinRange returns true if o represents an integer in the closed interval [from, to]. 
+func (o *Operands) NinRange(from, to int64) bool { + return o.T == 0 && from <= o.I && o.I <= to +} + +// NmodInRange returns true if o represents an integer in the closed interval [from, to] modulo mod. +func (o *Operands) NmodInRange(mod, from, to int64) bool { + modI := o.I % mod + return o.T == 0 && from <= modI && modI <= to +} + +func newOperands(v interface{}) (*Operands, error) { + switch v := v.(type) { + case int: + return newOperandsInt64(int64(v)), nil + case int8: + return newOperandsInt64(int64(v)), nil + case int16: + return newOperandsInt64(int64(v)), nil + case int32: + return newOperandsInt64(int64(v)), nil + case int64: + return newOperandsInt64(v), nil + case string: + return newOperandsString(v) + case float32, float64: + return nil, fmt.Errorf("floats should be formatted into a string") + default: + return nil, fmt.Errorf("invalid type %T; expected integer or string", v) + } +} + +func newOperandsInt64(i int64) *Operands { + if i < 0 { + i = -i + } + return &Operands{float64(i), i, 0, 0, 0, 0} +} + +func newOperandsString(s string) (*Operands, error) { + if s[0] == '-' { + s = s[1:] + } + n, err := strconv.ParseFloat(s, 64) + if err != nil { + return nil, err + } + ops := &Operands{N: n} + parts := strings.SplitN(s, ".", 2) + ops.I, err = strconv.ParseInt(parts[0], 10, 64) + if err != nil { + return nil, err + } + if len(parts) == 1 { + return ops, nil + } + fraction := parts[1] + ops.V = int64(len(fraction)) + for i := ops.V - 1; i >= 0; i-- { + if fraction[i] != '0' { + ops.W = i + 1 + break + } + } + if ops.V > 0 { + f, err := strconv.ParseInt(fraction, 10, 0) + if err != nil { + return nil, err + } + ops.F = f + } + if ops.W > 0 { + t, err := strconv.ParseInt(fraction[:ops.W], 10, 0) + if err != nil { + return nil, err + } + ops.T = t + } + return ops, nil +} diff --git a/vendor/github.com/mattermost/go-i18n/i18n/language/plural.go b/vendor/github.com/mattermost/go-i18n/i18n/language/plural.go new file mode 100644 index 00000000..1f3ea5c6 
--- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/language/plural.go @@ -0,0 +1,40 @@ +package language + +import ( + "fmt" +) + +// Plural represents a language pluralization form as defined here: +// http://cldr.unicode.org/index/cldr-spec/plural-rules +type Plural string + +// All defined plural categories. +const ( + Invalid Plural = "invalid" + Zero = "zero" + One = "one" + Two = "two" + Few = "few" + Many = "many" + Other = "other" +) + +// NewPlural returns src as a Plural +// or Invalid and a non-nil error if src is not a valid Plural. +func NewPlural(src string) (Plural, error) { + switch src { + case "zero": + return Zero, nil + case "one": + return One, nil + case "two": + return Two, nil + case "few": + return Few, nil + case "many": + return Many, nil + case "other": + return Other, nil + } + return Invalid, fmt.Errorf("invalid plural category %s", src) +} diff --git a/vendor/github.com/mattermost/go-i18n/i18n/language/pluralspec.go b/vendor/github.com/mattermost/go-i18n/i18n/language/pluralspec.go new file mode 100644 index 00000000..fc31e880 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/language/pluralspec.go @@ -0,0 +1,75 @@ +package language + +import "strings" + +// PluralSpec defines the CLDR plural rules for a language. +// http://www.unicode.org/cldr/charts/latest/supplemental/language_plural_rules.html +// http://unicode.org/reports/tr35/tr35-numbers.html#Operands +type PluralSpec struct { + Plurals map[Plural]struct{} + PluralFunc func(*Operands) Plural +} + +var pluralSpecs = make(map[string]*PluralSpec) + +func normalizePluralSpecID(id string) string { + id = strings.Replace(id, "_", "-", -1) + id = strings.ToLower(id) + return id +} + +// RegisterPluralSpec registers a new plural spec for the language ids. 
+func RegisterPluralSpec(ids []string, ps *PluralSpec) { + for _, id := range ids { + id = normalizePluralSpecID(id) + pluralSpecs[id] = ps + } +} + +// Plural returns the plural category for number as defined by +// the language's CLDR plural rules. +func (ps *PluralSpec) Plural(number interface{}) (Plural, error) { + ops, err := newOperands(number) + if err != nil { + return Invalid, err + } + return ps.PluralFunc(ops), nil +} + +// GetPluralSpec returns the PluralSpec that matches the longest prefix of tag. +// It returns nil if no PluralSpec matches tag. +func GetPluralSpec(tag string) *PluralSpec { + tag = NormalizeTag(tag) + subtag := tag + for { + if spec := pluralSpecs[subtag]; spec != nil { + return spec + } + end := strings.LastIndex(subtag, "-") + if end == -1 { + return nil + } + subtag = subtag[:end] + } +} + +func newPluralSet(plurals ...Plural) map[Plural]struct{} { + set := make(map[Plural]struct{}, len(plurals)) + for _, plural := range plurals { + set[plural] = struct{}{} + } + return set +} + +func intInRange(i, from, to int64) bool { + return from <= i && i <= to +} + +func intEqualsAny(i int64, any ...int64) bool { + for _, a := range any { + if i == a { + return true + } + } + return false +} diff --git a/vendor/github.com/mattermost/go-i18n/i18n/language/pluralspec_gen.go b/vendor/github.com/mattermost/go-i18n/i18n/language/pluralspec_gen.go new file mode 100644 index 00000000..0268bb92 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/language/pluralspec_gen.go @@ -0,0 +1,557 @@ +package language + +// This file is generated by i18n/language/codegen/generate.sh + +func init() { + + RegisterPluralSpec([]string{"bm", "bo", "dz", "id", "ig", "ii", "in", "ja", "jbo", "jv", "jw", "kde", "kea", "km", "ko", "lkt", "lo", "ms", "my", "nqo", "root", "sah", "ses", "sg", "th", "to", "vi", "wo", "yo", "yue", "zh"}, &PluralSpec{ + Plurals: newPluralSet(Other), + PluralFunc: func(ops *Operands) Plural { + return Other + }, + }) + 
RegisterPluralSpec([]string{"am", "as", "bn", "fa", "gu", "hi", "kn", "mr", "zu"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0 or n = 1 + if intEqualsAny(ops.I, 0) || + ops.NequalsAny(1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ff", "fr", "hy", "kab"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0,1 + if intEqualsAny(ops.I, 0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"pt"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0..1 + if intInRange(ops.I, 0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ast", "ca", "de", "en", "et", "fi", "fy", "gl", "it", "ji", "nl", "sv", "sw", "ur", "yi"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"si"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0,1 or i = 0 and f = 1 + if ops.NequalsAny(0, 1) || + intEqualsAny(ops.I, 0) && intEqualsAny(ops.F, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ak", "bh", "guw", "ln", "mg", "nso", "pa", "ti", "wa"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0..1 + if ops.NinRange(0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"tzm"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0..1 or n = 11..99 + if ops.NinRange(0, 1) || + ops.NinRange(11, 99) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"af", "asa", "az", "bem", "bez", "bg", "brx", "ce", "cgg", "chr", "ckb", "dv", "ee", 
"el", "eo", "es", "eu", "fo", "fur", "gsw", "ha", "haw", "hu", "jgo", "jmc", "ka", "kaj", "kcg", "kk", "kkj", "kl", "ks", "ksb", "ku", "ky", "lb", "lg", "mas", "mgo", "ml", "mn", "nah", "nb", "nd", "ne", "nn", "nnh", "no", "nr", "ny", "nyn", "om", "or", "os", "pap", "ps", "rm", "rof", "rwk", "saq", "sdh", "seh", "sn", "so", "sq", "ss", "ssy", "st", "syr", "ta", "te", "teo", "tig", "tk", "tn", "tr", "ts", "ug", "uz", "ve", "vo", "vun", "wae", "xh", "xog"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"da"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 or t != 0 and i = 0,1 + if ops.NequalsAny(1) || + !intEqualsAny(ops.T, 0) && intEqualsAny(ops.I, 0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"is"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // t = 0 and i % 10 = 1 and i % 100 != 11 or t != 0 + if intEqualsAny(ops.T, 0) && intEqualsAny(ops.I%10, 1) && !intEqualsAny(ops.I%100, 11) || + !intEqualsAny(ops.T, 0) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"mk"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 or f % 10 = 1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) || + intEqualsAny(ops.F%10, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"fil", "tl"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i = 1,2,3 or v = 0 and i % 10 != 4,6,9 or v != 0 and f % 10 != 4,6,9 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I, 1, 2, 3) || + intEqualsAny(ops.V, 0) && !intEqualsAny(ops.I%10, 4, 6, 9) || + !intEqualsAny(ops.V, 0) && !intEqualsAny(ops.F%10, 4, 6, 9) { + return One + } + 
return Other + }, + }) + RegisterPluralSpec([]string{"lv", "prg"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 0 or n % 100 = 11..19 or v = 2 and f % 100 = 11..19 + if ops.NmodEqualsAny(10, 0) || + ops.NmodInRange(100, 11, 19) || + intEqualsAny(ops.V, 2) && intInRange(ops.F%100, 11, 19) { + return Zero + } + // n % 10 = 1 and n % 100 != 11 or v = 2 and f % 10 = 1 and f % 100 != 11 or v != 2 and f % 10 = 1 + if ops.NmodEqualsAny(10, 1) && !ops.NmodEqualsAny(100, 11) || + intEqualsAny(ops.V, 2) && intEqualsAny(ops.F%10, 1) && !intEqualsAny(ops.F%100, 11) || + !intEqualsAny(ops.V, 2) && intEqualsAny(ops.F%10, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"lag"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // i = 0,1 and n != 0 + if intEqualsAny(ops.I, 0, 1) && !ops.NequalsAny(0) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ksh"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // n = 1 + if ops.NequalsAny(1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"iu", "kw", "naq", "se", "sma", "smi", "smj", "smn", "sms"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + return Other + }, + }) + RegisterPluralSpec([]string{"shi"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0 or n = 1 + if intEqualsAny(ops.I, 0) || + ops.NequalsAny(1) { + return One + } + // n = 2..10 + if ops.NinRange(2, 10) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"mo", "ro"}, &PluralSpec{ + 
Plurals: newPluralSet(One, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // v != 0 or n = 0 or n != 1 and n % 100 = 1..19 + if !intEqualsAny(ops.V, 0) || + ops.NequalsAny(0) || + !ops.NequalsAny(1) && ops.NmodInRange(100, 1, 19) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"bs", "hr", "sh", "sr"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 and i % 100 != 11 or f % 10 = 1 and f % 100 != 11 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) && !intEqualsAny(ops.I%100, 11) || + intEqualsAny(ops.F%10, 1) && !intEqualsAny(ops.F%100, 11) { + return One + } + // v = 0 and i % 10 = 2..4 and i % 100 != 12..14 or f % 10 = 2..4 and f % 100 != 12..14 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 2, 4) && !intInRange(ops.I%100, 12, 14) || + intInRange(ops.F%10, 2, 4) && !intInRange(ops.F%100, 12, 14) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"gd"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1,11 + if ops.NequalsAny(1, 11) { + return One + } + // n = 2,12 + if ops.NequalsAny(2, 12) { + return Two + } + // n = 3..10,13..19 + if ops.NinRange(3, 10) || ops.NinRange(13, 19) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"sl"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 100 = 1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 1) { + return One + } + // v = 0 and i % 100 = 2 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 2) { + return Two + } + // v = 0 and i % 100 = 3..4 or v != 0 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 3, 4) || + !intEqualsAny(ops.V, 0) { + return Few + } + return Other + }, + }) + 
RegisterPluralSpec([]string{"dsb", "hsb"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 100 = 1 or f % 100 = 1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 1) || + intEqualsAny(ops.F%100, 1) { + return One + } + // v = 0 and i % 100 = 2 or f % 100 = 2 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 2) || + intEqualsAny(ops.F%100, 2) { + return Two + } + // v = 0 and i % 100 = 3..4 or f % 100 = 3..4 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 3, 4) || + intInRange(ops.F%100, 3, 4) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"he", "iw"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // i = 2 and v = 0 + if intEqualsAny(ops.I, 2) && intEqualsAny(ops.V, 0) { + return Two + } + // v = 0 and n != 0..10 and n % 10 = 0 + if intEqualsAny(ops.V, 0) && !ops.NinRange(0, 10) && ops.NmodEqualsAny(10, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"cs", "sk"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // i = 2..4 and v = 0 + if intInRange(ops.I, 2, 4) && intEqualsAny(ops.V, 0) { + return Few + } + // v != 0 + if !intEqualsAny(ops.V, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"pl"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // v = 0 and i % 10 = 2..4 and i % 100 != 12..14 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 2, 4) && !intInRange(ops.I%100, 12, 14) { + return Few + } + // v = 0 and i != 1 and i % 10 = 0..1 or v 
= 0 and i % 10 = 5..9 or v = 0 and i % 100 = 12..14 + if intEqualsAny(ops.V, 0) && !intEqualsAny(ops.I, 1) && intInRange(ops.I%10, 0, 1) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 5, 9) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 12, 14) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"be"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 1 and n % 100 != 11 + if ops.NmodEqualsAny(10, 1) && !ops.NmodEqualsAny(100, 11) { + return One + } + // n % 10 = 2..4 and n % 100 != 12..14 + if ops.NmodInRange(10, 2, 4) && !ops.NmodInRange(100, 12, 14) { + return Few + } + // n % 10 = 0 or n % 10 = 5..9 or n % 100 = 11..14 + if ops.NmodEqualsAny(10, 0) || + ops.NmodInRange(10, 5, 9) || + ops.NmodInRange(100, 11, 14) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"lt"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 1 and n % 100 != 11..19 + if ops.NmodEqualsAny(10, 1) && !ops.NmodInRange(100, 11, 19) { + return One + } + // n % 10 = 2..9 and n % 100 != 11..19 + if ops.NmodInRange(10, 2, 9) && !ops.NmodInRange(100, 11, 19) { + return Few + } + // f != 0 + if !intEqualsAny(ops.F, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"mt"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 0 or n % 100 = 2..10 + if ops.NequalsAny(0) || + ops.NmodInRange(100, 2, 10) { + return Few + } + // n % 100 = 11..19 + if ops.NmodInRange(100, 11, 19) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"ru", "uk"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 and i % 100 != 11 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) && 
!intEqualsAny(ops.I%100, 11) { + return One + } + // v = 0 and i % 10 = 2..4 and i % 100 != 12..14 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 2, 4) && !intInRange(ops.I%100, 12, 14) { + return Few + } + // v = 0 and i % 10 = 0 or v = 0 and i % 10 = 5..9 or v = 0 and i % 100 = 11..14 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 0) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 5, 9) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 11, 14) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"br"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 1 and n % 100 != 11,71,91 + if ops.NmodEqualsAny(10, 1) && !ops.NmodEqualsAny(100, 11, 71, 91) { + return One + } + // n % 10 = 2 and n % 100 != 12,72,92 + if ops.NmodEqualsAny(10, 2) && !ops.NmodEqualsAny(100, 12, 72, 92) { + return Two + } + // n % 10 = 3..4,9 and n % 100 != 10..19,70..79,90..99 + if (ops.NmodInRange(10, 3, 4) || ops.NmodEqualsAny(10, 9)) && !(ops.NmodInRange(100, 10, 19) || ops.NmodInRange(100, 70, 79) || ops.NmodInRange(100, 90, 99)) { + return Few + } + // n != 0 and n % 1000000 = 0 + if !ops.NequalsAny(0) && ops.NmodEqualsAny(1000000, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"ga"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + // n = 3..6 + if ops.NinRange(3, 6) { + return Few + } + // n = 7..10 + if ops.NinRange(7, 10) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"gv"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) { + return One + } + // v = 0 and i % 10 = 2 + if intEqualsAny(ops.V, 0) && 
intEqualsAny(ops.I%10, 2) { + return Two + } + // v = 0 and i % 100 = 0,20,40,60,80 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 0, 20, 40, 60, 80) { + return Few + } + // v != 0 + if !intEqualsAny(ops.V, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"ar", "ars"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + // n % 100 = 3..10 + if ops.NmodInRange(100, 3, 10) { + return Few + } + // n % 100 = 11..99 + if ops.NmodInRange(100, 11, 99) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"cy"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + // n = 3 + if ops.NequalsAny(3) { + return Few + } + // n = 6 + if ops.NequalsAny(6) { + return Many + } + return Other + }, + }) +} diff --git a/vendor/github.com/mattermost/go-i18n/i18n/translation/plural_translation.go b/vendor/github.com/mattermost/go-i18n/i18n/translation/plural_translation.go new file mode 100644 index 00000000..86e89962 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/translation/plural_translation.go @@ -0,0 +1,82 @@ +package translation + +import ( + "github.com/mattermost/go-i18n/i18n/language" +) + +type pluralTranslation struct { + id string + templates map[language.Plural]*template +} + +func (pt *pluralTranslation) MarshalInterface() interface{} { + return map[string]interface{}{ + "id": pt.id, + "translation": pt.templates, + } +} + +func (pt *pluralTranslation) MarshalFlatInterface() interface{} { + return pt.templates +} + +func (pt *pluralTranslation) ID() string { + 
return pt.id +} + +func (pt *pluralTranslation) Template(pc language.Plural) *template { + return pt.templates[pc] +} + +func (pt *pluralTranslation) UntranslatedCopy() Translation { + return &pluralTranslation{pt.id, make(map[language.Plural]*template)} +} + +func (pt *pluralTranslation) Normalize(l *language.Language) Translation { + // Delete plural categories that don't belong to this language. + for pc := range pt.templates { + if _, ok := l.Plurals[pc]; !ok { + delete(pt.templates, pc) + } + } + // Create map entries for missing valid categories. + for pc := range l.Plurals { + if _, ok := pt.templates[pc]; !ok { + pt.templates[pc] = mustNewTemplate("") + } + } + return pt +} + +func (pt *pluralTranslation) Backfill(src Translation) Translation { + for pc, t := range pt.templates { + if (t == nil || t.src == "") && src != nil { + pt.templates[pc] = src.Template(language.Other) + } + } + return pt +} + +func (pt *pluralTranslation) Merge(t Translation) Translation { + other, ok := t.(*pluralTranslation) + if !ok || pt.ID() != t.ID() { + return t + } + for pluralCategory, template := range other.templates { + if template != nil && template.src != "" { + pt.templates[pluralCategory] = template + } + } + return pt +} + +func (pt *pluralTranslation) Incomplete(l *language.Language) bool { + for pc := range l.Plurals { + if t := pt.templates[pc]; t == nil || t.src == "" { + return true + } + } + return false +} + +var _ = Translation(&pluralTranslation{}) diff --git a/vendor/github.com/mattermost/go-i18n/i18n/translation/single_translation.go b/vendor/github.com/mattermost/go-i18n/i18n/translation/single_translation.go new file mode 100644 index 00000000..f2fab623 --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/translation/single_translation.go @@ -0,0 +1,61 @@ +package translation + +import ( + "github.com/mattermost/go-i18n/i18n/language" +) + +type singleTranslation struct { + id string + template *template +} + +func (st *singleTranslation) 
MarshalInterface() interface{} { + return map[string]interface{}{ + "id": st.id, + "translation": st.template, + } +} + +func (st *singleTranslation) MarshalFlatInterface() interface{} { + return map[string]interface{}{"other": st.template} +} + +func (st *singleTranslation) ID() string { + return st.id +} + +func (st *singleTranslation) Template(pc language.Plural) *template { + return st.template +} + +func (st *singleTranslation) UntranslatedCopy() Translation { + return &singleTranslation{st.id, mustNewTemplate("")} +} + +func (st *singleTranslation) Normalize(language *language.Language) Translation { + return st +} + +func (st *singleTranslation) Backfill(src Translation) Translation { + if (st.template == nil || st.template.src == "") && src != nil { + st.template = src.Template(language.Other) + } + return st +} + +func (st *singleTranslation) Merge(t Translation) Translation { + other, ok := t.(*singleTranslation) + if !ok || st.ID() != t.ID() { + return t + } + if other.template != nil && other.template.src != "" { + st.template = other.template + } + return st +} + +func (st *singleTranslation) Incomplete(l *language.Language) bool { + return st.template == nil || st.template.src == "" +} + +var _ = Translation(&singleTranslation{}) diff --git a/vendor/github.com/mattermost/go-i18n/i18n/translation/template.go b/vendor/github.com/mattermost/go-i18n/i18n/translation/template.go new file mode 100644 index 00000000..3310150c --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/translation/template.go @@ -0,0 +1,65 @@ +package translation + +import ( + "bytes" + "encoding" + "strings" + gotemplate "text/template" +) + +type template struct { + tmpl *gotemplate.Template + src string +} + +func newTemplate(src string) (*template, error) { + if src == "" { + return new(template), nil + } + + var tmpl template + err := tmpl.parseTemplate(src) + return &tmpl, err +} + +func mustNewTemplate(src string) *template { + t, err := newTemplate(src) + if err != 
nil { + panic(err) + } + return t +} + +func (t *template) String() string { + return t.src +} + +func (t *template) Execute(args interface{}) string { + if t.tmpl == nil { + return t.src + } + var buf bytes.Buffer + if err := t.tmpl.Execute(&buf, args); err != nil { + return err.Error() + } + return buf.String() +} + +func (t *template) MarshalText() ([]byte, error) { + return []byte(t.src), nil +} + +func (t *template) UnmarshalText(src []byte) error { + return t.parseTemplate(string(src)) +} + +func (t *template) parseTemplate(src string) (err error) { + t.src = src + if strings.Contains(src, "{{") { + t.tmpl, err = gotemplate.New(src).Parse(src) + } + return +} + +var _ = encoding.TextMarshaler(&template{}) +var _ = encoding.TextUnmarshaler(&template{}) diff --git a/vendor/github.com/mattermost/go-i18n/i18n/translation/translation.go b/vendor/github.com/mattermost/go-i18n/i18n/translation/translation.go new file mode 100644 index 00000000..9c7fd49e --- /dev/null +++ b/vendor/github.com/mattermost/go-i18n/i18n/translation/translation.go @@ -0,0 +1,84 @@ +// Package translation defines the interface for a translation. +package translation + +import ( + "fmt" + + "github.com/mattermost/go-i18n/i18n/language" +) + +// Translation is the interface that represents a translated string. +type Translation interface { + // MarshalInterface returns the object that should be used + // to serialize the translation. + MarshalInterface() interface{} + MarshalFlatInterface() interface{} + ID() string + Template(language.Plural) *template + UntranslatedCopy() Translation + Normalize(language *language.Language) Translation + Backfill(src Translation) Translation + Merge(Translation) Translation + Incomplete(l *language.Language) bool +} + +// SortableByID implements sort.Interface for a slice of translations. 
+type SortableByID []Translation + +func (a SortableByID) Len() int { return len(a) } +func (a SortableByID) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a SortableByID) Less(i, j int) bool { return a[i].ID() < a[j].ID() } + +// NewTranslation reflects on data to create a new Translation. +// +// data["id"] must be a string and data["translation"] must be either a string +// for a non-plural translation or a map[string]interface{} for a plural translation. +func NewTranslation(data map[string]interface{}) (Translation, error) { + id, ok := data["id"].(string) + if !ok { + return nil, fmt.Errorf(`missing "id" key`) + } + var pluralObject map[string]interface{} + switch translation := data["translation"].(type) { + case string: + tmpl, err := newTemplate(translation) + if err != nil { + return nil, err + } + return &singleTranslation{id, tmpl}, nil + case map[interface{}]interface{}: + // The YAML parser uses interface{} keys so we first convert them to string keys. + pluralObject = make(map[string]interface{}) + for k, v := range translation { + kstr, ok := k.(string) + if !ok { + return nil, fmt.Errorf(`invalid plural category type %T; expected string`, k) + } + pluralObject[kstr] = v + } + case map[string]interface{}: + pluralObject = translation + case nil: + return nil, fmt.Errorf(`missing "translation" key`) + default: + return nil, fmt.Errorf(`unsupported type for "translation" key %T`, translation) + } + + templates := make(map[language.Plural]*template, len(pluralObject)) + for k, v := range pluralObject { + pc, err := language.NewPlural(k) + if err != nil { + return nil, err + } + str, ok := v.(string) + if !ok { + return nil, fmt.Errorf(`plural category "%s" has value of type %T; expected string`, pc, v) + } + tmpl, err := newTemplate(str) + if err != nil { + return nil, err + } + templates[pc] = tmpl + } + return &pluralTranslation{id, templates}, nil +} diff --git a/vendor/github.com/mattermost/gosaml2/.gitignore 
b/vendor/github.com/mattermost/gosaml2/.gitignore new file mode 100644 index 00000000..9ed3b07c --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/.gitignore @@ -0,0 +1 @@ +*.test diff --git a/vendor/github.com/mattermost/gosaml2/.travis.yml b/vendor/github.com/mattermost/gosaml2/.travis.yml new file mode 100644 index 00000000..b9b0e6ae --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/.travis.yml @@ -0,0 +1,12 @@ +language: go + +go: + - 1.17 + - 1.16 + - 1.15 + - 1.14 + - tip + +matrix: + allow_failures: + - go: tip diff --git a/vendor/github.com/mattermost/gosaml2/LICENSE b/vendor/github.com/mattermost/gosaml2/LICENSE new file mode 100644 index 00000000..67db8588 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/vendor/github.com/mattermost/gosaml2/README.md b/vendor/github.com/mattermost/gosaml2/README.md new file mode 100644 index 00000000..101b1e67 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/README.md @@ -0,0 +1,34 @@ +# gosaml2 + +[![Build Status](https://github.com/mattermost/gosaml2/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/mattermost/gosaml2/actions/workflows/test.yml?query=branch%3Amain) +[![GoDoc](https://godoc.org/github.com/mattermost/gosaml2?status.svg)](https://godoc.org/github.com/mattermost/gosaml2) + +SAML 2.0 implemementation for Service Providers based on [etree](https://github.com/beevik/etree) +and [goxmldsig](https://github.com/russellhaering/goxmldsig), a pure Go +implementation of XML digital signatures. + +## Installation + +Install `gosaml2` into your `$GOPATH` using `go get`: + +``` +go get github.com/mattermost/gosaml2 +``` + +## Example + +See [demo.go](s2example/demo.go). + +## Supported Identity Providers + +This library is meant to be a generic SAML implementation. If you find a +standards compliant identity provider that it doesn't work with please +submit a bug or pull request. + +The following identity providers have been tested: + +* Okta +* Auth0 +* Shibboleth +* Ipsilon +* OneLogin diff --git a/vendor/github.com/mattermost/gosaml2/attribute.go b/vendor/github.com/mattermost/gosaml2/attribute.go new file mode 100644 index 00000000..6db01535 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/attribute.go @@ -0,0 +1,66 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package saml2 + +import "github.com/mattermost/gosaml2/types" + +// Values is a convenience wrapper for a map of strings to Attributes, which +// can be used for easy access to the string values of Attribute lists. +type Values map[string]types.Attribute + +// Get is a safe method (nil maps will not panic) for returning the first value +// for an attribute at a key, or the empty string if none exists. +func (vals Values) Get(k string) string { + if vals == nil { + return "" + } + if v, ok := vals[k]; ok && len(v.Values) > 0 { + return string(v.Values[0].Value) + } + return "" +} + +//GetSize returns the number of values for an attribute at a key. +//Returns '0' in case of error or if key is not found. +func (vals Values) GetSize(k string) int { + if vals == nil { + return 0 + } + + v, ok := vals[k] + if ok { + return len(v.Values) + } + + return 0 +} + +//GetAll returns all the values for an attribute at a key. +//Returns an empty slice in case of error of if key is not found. +func (vals Values) GetAll(k string) []string { + var av []string + + if vals == nil { + return av + } + + if v, ok := vals[k]; ok && len(v.Values) > 0 { + for i := 0; i < len(v.Values); i++ { + av = append(av, string(v.Values[i].Value)) + } + } + + return av +} diff --git a/vendor/github.com/mattermost/gosaml2/authn_request.go b/vendor/github.com/mattermost/gosaml2/authn_request.go new file mode 100644 index 00000000..096b6b6c --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/authn_request.go @@ -0,0 +1,30 @@ +// Copyright 2016 Russell Haering et al. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package saml2 + +import "time" + +// AuthNRequest is the go struct representation of an authentication request +type AuthNRequest struct { + ID string `xml:",attr"` + Version string `xml:",attr"` + ProtocolBinding string `xml:",attr"` + AssertionConsumerServiceURL string `xml:",attr"` + + IssueInstant time.Time `xml:",attr"` + + Destination string `xml:",attr"` + Issuer string +} diff --git a/vendor/github.com/mattermost/gosaml2/build_logout_response.go b/vendor/github.com/mattermost/gosaml2/build_logout_response.go new file mode 100644 index 00000000..e42b6d30 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/build_logout_response.go @@ -0,0 +1,158 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package saml2 + +import ( + "bytes" + "encoding/base64" + "html/template" + + "github.com/beevik/etree" + "github.com/mattermost/gosaml2/uuid" +) + +func (sp *SAMLServiceProvider) buildLogoutResponse(statusCodeValue string, reqID string, includeSig bool) (*etree.Document, error) { + logoutResponse := &etree.Element{ + Space: "samlp", + Tag: "LogoutResponse", + } + + logoutResponse.CreateAttr("xmlns:samlp", "urn:oasis:names:tc:SAML:2.0:protocol") + logoutResponse.CreateAttr("xmlns:saml", "urn:oasis:names:tc:SAML:2.0:assertion") + + arId := uuid.NewV4() + + logoutResponse.CreateAttr("ID", "_"+arId.String()) + logoutResponse.CreateAttr("Version", "2.0") + logoutResponse.CreateAttr("IssueInstant", sp.Clock.Now().UTC().Format(issueInstantFormat)) + logoutResponse.CreateAttr("Destination", sp.IdentityProviderSLOURL) + logoutResponse.CreateAttr("InResponseTo", reqID) + + // NOTE(russell_h): In earlier versions we mistakenly sent the IdentityProviderIssuer + // in the AuthnRequest. For backwards compatibility we will fall back to that + // behavior when ServiceProviderIssuer isn't set. 
+ if sp.ServiceProviderIssuer != "" { + logoutResponse.CreateElement("saml:Issuer").SetText(sp.ServiceProviderIssuer) + } else { + logoutResponse.CreateElement("saml:Issuer").SetText(sp.IdentityProviderIssuer) + } + + status := logoutResponse.CreateElement("samlp:Status") + statusCode := status.CreateElement("samlp:StatusCode") + statusCode.CreateAttr("Value", statusCodeValue) + + doc := etree.NewDocument() + + // Only POST binding includes in (includeSig) + if includeSig { + signed, err := sp.SignLogoutResponse(logoutResponse) + if err != nil { + return nil, err + } + + doc.SetRoot(signed) + } else { + doc.SetRoot(logoutResponse) + } + return doc, nil +} +func (sp *SAMLServiceProvider) BuildLogoutResponseDocument(status string, reqID string) (*etree.Document, error) { + return sp.buildLogoutResponse(status, reqID, true) +} + +func (sp *SAMLServiceProvider) BuildLogoutResponseDocumentNoSig(status string, reqID string) (*etree.Document, error) { + return sp.buildLogoutResponse(status, reqID, false) +} + +func (sp *SAMLServiceProvider) SignLogoutResponse(el *etree.Element) (*etree.Element, error) { + ctx := sp.SigningContext() + + sig, err := ctx.ConstructSignature(el, true) + if err != nil { + return nil, err + } + + ret := el.Copy() + + var children []etree.Token + children = append(children, ret.Child[0]) // issuer is always first + children = append(children, sig) // next is the signature + children = append(children, ret.Child[1:]...) // then all other children + ret.Child = children + + return ret, nil +} + +func (sp *SAMLServiceProvider) buildLogoutResponseBodyPostFromDocument(relayState string, doc *etree.Document) ([]byte, error) { + respBuf, err := doc.WriteToBytes() + if err != nil { + return nil, err + } + + encodedRespBuf := base64.StdEncoding.EncodeToString(respBuf) + + var tmpl *template.Template + var rv bytes.Buffer + + if relayState != "" { + tmpl = template.Must(template.New("saml-post-form").Parse(`` + + `
` + + `` + + `` + + `` + + `
` + + `` + + `` + + ``)) + data := struct { + URL string + SAMLResponse string + RelayState string + }{ + URL: sp.IdentityProviderSLOURL, + SAMLResponse: encodedRespBuf, + RelayState: relayState, + } + if err = tmpl.Execute(&rv, data); err != nil { + return nil, err + } + } else { + tmpl = template.Must(template.New("saml-post-form").Parse(`` + + `
` + + `` + + `` + + `
` + + `` + + `` + + ``)) + data := struct { + URL string + SAMLResponse string + }{ + URL: sp.IdentityProviderSLOURL, + SAMLResponse: encodedRespBuf, + } + + if err = tmpl.Execute(&rv, data); err != nil { + return nil, err + } + } + + return rv.Bytes(), nil +} + +func (sp *SAMLServiceProvider) BuildLogoutResponseBodyPostFromDocument(relayState string, doc *etree.Document) ([]byte, error) { + return sp.buildLogoutResponseBodyPostFromDocument(relayState, doc) +} diff --git a/vendor/github.com/mattermost/gosaml2/build_request.go b/vendor/github.com/mattermost/gosaml2/build_request.go new file mode 100644 index 00000000..941d63b9 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/build_request.go @@ -0,0 +1,559 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package saml2 + +import ( + "bytes" + "compress/flate" + "encoding/base64" + "fmt" + "html/template" + "net/http" + "net/url" + + "github.com/beevik/etree" + "github.com/mattermost/gosaml2/uuid" +) + +const issueInstantFormat = "2006-01-02T15:04:05Z" + +func (sp *SAMLServiceProvider) buildAuthnRequest(includeSig bool) (*etree.Document, error) { + authnRequest := &etree.Element{ + Space: "samlp", + Tag: "AuthnRequest", + } + + authnRequest.CreateAttr("xmlns:samlp", "urn:oasis:names:tc:SAML:2.0:protocol") + authnRequest.CreateAttr("xmlns:saml", "urn:oasis:names:tc:SAML:2.0:assertion") + + arId := uuid.NewV4() + + authnRequest.CreateAttr("ID", "_"+arId.String()) + authnRequest.CreateAttr("Version", "2.0") + authnRequest.CreateAttr("ProtocolBinding", "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST") + authnRequest.CreateAttr("AssertionConsumerServiceURL", sp.AssertionConsumerServiceURL) + authnRequest.CreateAttr("IssueInstant", sp.Clock.Now().UTC().Format(issueInstantFormat)) + authnRequest.CreateAttr("Destination", sp.IdentityProviderSSOURL) + + // NOTE(russell_h): In earlier versions we mistakenly sent the IdentityProviderIssuer + // in the AuthnRequest. For backwards compatibility we will fall back to that + // behavior when ServiceProviderIssuer isn't set. 
+ if sp.ServiceProviderIssuer != "" { + authnRequest.CreateElement("saml:Issuer").SetText(sp.ServiceProviderIssuer) + } else { + authnRequest.CreateElement("saml:Issuer").SetText(sp.IdentityProviderIssuer) + } + + nameIdPolicy := authnRequest.CreateElement("samlp:NameIDPolicy") + nameIdPolicy.CreateAttr("AllowCreate", "true") + if sp.NameIdFormat != "" { + nameIdPolicy.CreateAttr("Format", sp.NameIdFormat) + } + + if sp.RequestedAuthnContext != nil { + requestedAuthnContext := authnRequest.CreateElement("samlp:RequestedAuthnContext") + requestedAuthnContext.CreateAttr("Comparison", sp.RequestedAuthnContext.Comparison) + + for _, context := range sp.RequestedAuthnContext.Contexts { + authnContextClassRef := requestedAuthnContext.CreateElement("saml:AuthnContextClassRef") + authnContextClassRef.SetText(context) + } + } + + if sp.ScopingIDPProviderId != "" && sp.ScopingIDPProviderName != "" { + scoping := authnRequest.CreateElement("samlp:Scoping") + idpList := scoping.CreateElement("samlp:IDPList") + idpEntry := idpList.CreateElement("samlp:IDPEntry") + idpEntry.CreateAttr("ProviderID", sp.ScopingIDPProviderId) + idpEntry.CreateAttr("Name", sp.ScopingIDPProviderName) + } + + doc := etree.NewDocument() + + // Only POST binding includes in (includeSig) + if sp.SignAuthnRequests && includeSig { + signed, err := sp.SignAuthnRequest(authnRequest) + if err != nil { + return nil, err + } + + doc.SetRoot(signed) + } else { + doc.SetRoot(authnRequest) + } + return doc, nil +} + +func (sp *SAMLServiceProvider) BuildAuthRequestDocument() (*etree.Document, error) { + return sp.buildAuthnRequest(true) +} + +func (sp *SAMLServiceProvider) BuildAuthRequestDocumentNoSig() (*etree.Document, error) { + return sp.buildAuthnRequest(false) +} + +// SignAuthnRequest takes a document, builds a signature, creates another document +// and inserts the signature in it. According to the schema, the position of the +// signature is right after the Issuer [1] then all other children. 
+// +// [1] https://docs.oasis-open.org/security/saml/v2.0/saml-schema-protocol-2.0.xsd +func (sp *SAMLServiceProvider) SignAuthnRequest(el *etree.Element) (*etree.Element, error) { + ctx := sp.SigningContext() + + sig, err := ctx.ConstructSignature(el, true) + if err != nil { + return nil, err + } + + ret := el.Copy() + + var children []etree.Token + children = append(children, ret.Child[0]) // issuer is always first + children = append(children, sig) // next is the signature + children = append(children, ret.Child[1:]...) // then all other children + ret.Child = children + + return ret, nil +} + +// BuildAuthRequest builds for identity provider +func (sp *SAMLServiceProvider) BuildAuthRequest() (string, error) { + doc, err := sp.BuildAuthRequestDocument() + if err != nil { + return "", err + } + return doc.WriteToString() +} + +func (sp *SAMLServiceProvider) buildAuthURLFromDocument(relayState, binding string, doc *etree.Document) (string, error) { + parsedUrl, err := url.Parse(sp.IdentityProviderSSOURL) + if err != nil { + return "", err + } + + authnRequest, err := doc.WriteToString() + if err != nil { + return "", err + } + + buf := &bytes.Buffer{} + + fw, err := flate.NewWriter(buf, flate.DefaultCompression) + if err != nil { + return "", fmt.Errorf("flate NewWriter error: %v", err) + } + + _, err = fw.Write([]byte(authnRequest)) + if err != nil { + return "", fmt.Errorf("flate.Writer Write error: %v", err) + } + + err = fw.Close() + if err != nil { + return "", fmt.Errorf("flate.Writer Close error: %v", err) + } + + qs := parsedUrl.Query() + + qs.Add("SAMLRequest", base64.StdEncoding.EncodeToString(buf.Bytes())) + + if relayState != "" { + qs.Add("RelayState", relayState) + } + + if sp.SignAuthnRequests && binding == BindingHttpRedirect { + // Sign URL encoded query (see Section 3.4.4.1 DEFLATE Encoding of saml-bindings-2.0-os.pdf) + ctx := sp.SigningContext() + qs.Add("SigAlg", ctx.GetSignatureMethodIdentifier()) + var rawSignature []byte + if rawSignature, 
err = ctx.SignString(signatureInputString(qs.Get("SAMLRequest"), qs.Get("RelayState"), qs.Get("SigAlg"))); err != nil { + return "", fmt.Errorf("unable to sign query string of redirect URL: %v", err) + } + + // Now add base64 encoded Signature + qs.Add("Signature", base64.StdEncoding.EncodeToString(rawSignature)) + } + + //Here the parameters may appear in any order. + parsedUrl.RawQuery = qs.Encode() + return parsedUrl.String(), nil +} + +func (sp *SAMLServiceProvider) BuildAuthURLFromDocument(relayState string, doc *etree.Document) (string, error) { + return sp.buildAuthURLFromDocument(relayState, BindingHttpPost, doc) +} + +func (sp *SAMLServiceProvider) BuildAuthURLRedirect(relayState string, doc *etree.Document) (string, error) { + return sp.buildAuthURLFromDocument(relayState, BindingHttpRedirect, doc) +} + +func (sp *SAMLServiceProvider) buildAuthBodyPostFromDocument(relayState string, doc *etree.Document) ([]byte, error) { + reqBuf, err := doc.WriteToBytes() + if err != nil { + return nil, err + } + + encodedReqBuf := base64.StdEncoding.EncodeToString(reqBuf) + + var tmpl *template.Template + var rv bytes.Buffer + + if relayState != "" { + tmpl = template.Must(template.New("saml-post-form").Parse(`` + + `
` + + `` + + `` + + `` + + `
` + + ``)) + + data := struct { + URL string + SAMLRequest string + RelayState string + }{ + URL: sp.IdentityProviderSSOURL, + SAMLRequest: encodedReqBuf, + RelayState: relayState, + } + if err = tmpl.Execute(&rv, data); err != nil { + return nil, err + } + } else { + tmpl = template.Must(template.New("saml-post-form").Parse(`` + + `
` + + `` + + `` + + `
` + + ``)) + + data := struct { + URL string + SAMLRequest string + }{ + URL: sp.IdentityProviderSSOURL, + SAMLRequest: encodedReqBuf, + } + if err = tmpl.Execute(&rv, data); err != nil { + return nil, err + } + } + + return rv.Bytes(), nil +} + +//BuildAuthBodyPost builds the POST body to be sent to IDP. +func (sp *SAMLServiceProvider) BuildAuthBodyPost(relayState string) ([]byte, error) { + var doc *etree.Document + var err error + + if sp.SignAuthnRequests { + doc, err = sp.BuildAuthRequestDocument() + } else { + doc, err = sp.BuildAuthRequestDocumentNoSig() + } + + if err != nil { + return nil, err + } + + return sp.buildAuthBodyPostFromDocument(relayState, doc) +} + +//BuildAuthBodyPostFromDocument builds the POST body to be sent to IDP. +//It takes the AuthnRequest xml as input. +func (sp *SAMLServiceProvider) BuildAuthBodyPostFromDocument(relayState string, doc *etree.Document) ([]byte, error) { + return sp.buildAuthBodyPostFromDocument(relayState, doc) +} + +// BuildAuthURL builds redirect URL to be sent to principal +func (sp *SAMLServiceProvider) BuildAuthURL(relayState string) (string, error) { + doc, err := sp.BuildAuthRequestDocument() + if err != nil { + return "", err + } + return sp.BuildAuthURLFromDocument(relayState, doc) +} + +// AuthRedirect takes a ResponseWriter and Request from an http interaction and +// redirects to the SAMLServiceProvider's configured IdP, including the +// relayState provided, if any. 
+func (sp *SAMLServiceProvider) AuthRedirect(w http.ResponseWriter, r *http.Request, relayState string) (err error) { + url, err := sp.BuildAuthURL(relayState) + if err != nil { + return err + } + + http.Redirect(w, r, url, http.StatusFound) + return nil +} + +func (sp *SAMLServiceProvider) buildLogoutRequest(includeSig bool, nameID string, sessionIndex string) (*etree.Document, error) { + logoutRequest := &etree.Element{ + Space: "samlp", + Tag: "LogoutRequest", + } + + logoutRequest.CreateAttr("xmlns:samlp", "urn:oasis:names:tc:SAML:2.0:protocol") + logoutRequest.CreateAttr("xmlns:saml", "urn:oasis:names:tc:SAML:2.0:assertion") + + arId := uuid.NewV4() + + logoutRequest.CreateAttr("ID", "_"+arId.String()) + logoutRequest.CreateAttr("Version", "2.0") + logoutRequest.CreateAttr("IssueInstant", sp.Clock.Now().UTC().Format(issueInstantFormat)) + logoutRequest.CreateAttr("Destination", sp.IdentityProviderSLOURL) + + // NOTE(russell_h): In earlier versions we mistakenly sent the IdentityProviderIssuer + // in the AuthnRequest. For backwards compatibility we will fall back to that + // behavior when ServiceProviderIssuer isn't set. + // TODO: Throw error in case Issuer is empty. + if sp.ServiceProviderIssuer != "" { + logoutRequest.CreateElement("saml:Issuer").SetText(sp.ServiceProviderIssuer) + } else { + logoutRequest.CreateElement("saml:Issuer").SetText(sp.IdentityProviderIssuer) + } + + nameId := logoutRequest.CreateElement("saml:NameID") + nameId.SetText(nameID) + nameId.CreateAttr("Format", sp.NameIdFormat) + + //Section 3.7.1 - http://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf says + //SessionIndex is optional. If the IDP supports SLO then it must send SessionIndex as per + //Section 4.1.4.2 of https://docs.oasis-open.org/security/saml/v2.0/saml-profiles-2.0-os.pdf. 
+ //As per section 4.4.3.1 of //docs.oasis-open.org/security/saml/v2.0/saml-profiles-2.0-os.pdf, + //a LogoutRequest issued by Session Participant to Identity Provider, must contain + //at least one SessionIndex element needs to be included. + nameId = logoutRequest.CreateElement("samlp:SessionIndex") + nameId.SetText(sessionIndex) + + doc := etree.NewDocument() + + if includeSig { + signed, err := sp.SignLogoutRequest(logoutRequest) + if err != nil { + return nil, err + } + + doc.SetRoot(signed) + } else { + doc.SetRoot(logoutRequest) + } + + return doc, nil +} + +func (sp *SAMLServiceProvider) SignLogoutRequest(el *etree.Element) (*etree.Element, error) { + ctx := sp.SigningContext() + + sig, err := ctx.ConstructSignature(el, true) + if err != nil { + return nil, err + } + + ret := el.Copy() + + var children []etree.Token + children = append(children, ret.Child[0]) // issuer is always first + children = append(children, sig) // next is the signature + children = append(children, ret.Child[1:]...) // then all other children + ret.Child = children + + return ret, nil +} + +func (sp *SAMLServiceProvider) BuildLogoutRequestDocumentNoSig(nameID string, sessionIndex string) (*etree.Document, error) { + return sp.buildLogoutRequest(false, nameID, sessionIndex) +} + +func (sp *SAMLServiceProvider) BuildLogoutRequestDocument(nameID string, sessionIndex string) (*etree.Document, error) { + return sp.buildLogoutRequest(true, nameID, sessionIndex) +} + +//BuildLogoutBodyPostFromDocument builds the POST body to be sent to IDP. +//It takes the LogoutRequest xml as input. 
+func (sp *SAMLServiceProvider) BuildLogoutBodyPostFromDocument(relayState string, doc *etree.Document) ([]byte, error) { + return sp.buildLogoutBodyPostFromDocument(relayState, doc) +} + +func (sp *SAMLServiceProvider) buildLogoutBodyPostFromDocument(relayState string, doc *etree.Document) ([]byte, error) { + reqBuf, err := doc.WriteToBytes() + if err != nil { + return nil, err + } + + encodedReqBuf := base64.StdEncoding.EncodeToString(reqBuf) + var tmpl *template.Template + var rv bytes.Buffer + + if relayState != "" { + tmpl = template.Must(template.New("saml-post-form").Parse(`` + + `
` + + `` + + `` + + `` + + `
` + + ``)) + + data := struct { + URL string + SAMLRequest string + RelayState string + }{ + URL: sp.IdentityProviderSLOURL, + SAMLRequest: encodedReqBuf, + RelayState: relayState, + } + if err = tmpl.Execute(&rv, data); err != nil { + return nil, err + } + } else { + tmpl = template.Must(template.New("saml-post-form").Parse(`` + + `
` + + `` + + `` + + `
` + + ``)) + + data := struct { + URL string + SAMLRequest string + }{ + URL: sp.IdentityProviderSLOURL, + SAMLRequest: encodedReqBuf, + } + if err = tmpl.Execute(&rv, data); err != nil { + return nil, err + } + } + + return rv.Bytes(), nil +} + +func (sp *SAMLServiceProvider) BuildLogoutURLRedirect(relayState string, doc *etree.Document) (string, error) { + return sp.buildLogoutURLFromDocument(relayState, BindingHttpRedirect, doc) +} + +func (sp *SAMLServiceProvider) buildLogoutURLFromDocument(relayState, binding string, doc *etree.Document) (string, error) { + parsedUrl, err := url.Parse(sp.IdentityProviderSLOURL) + if err != nil { + return "", err + } + + logoutRequest, err := doc.WriteToString() + if err != nil { + return "", err + } + + buf := &bytes.Buffer{} + + fw, err := flate.NewWriter(buf, flate.DefaultCompression) + if err != nil { + return "", fmt.Errorf("flate NewWriter error: %v", err) + } + + _, err = fw.Write([]byte(logoutRequest)) + if err != nil { + return "", fmt.Errorf("flate.Writer Write error: %v", err) + } + + err = fw.Close() + if err != nil { + return "", fmt.Errorf("flate.Writer Close error: %v", err) + } + + qs := parsedUrl.Query() + + qs.Add("SAMLRequest", base64.StdEncoding.EncodeToString(buf.Bytes())) + + if relayState != "" { + qs.Add("RelayState", relayState) + } + + if binding == BindingHttpRedirect { + // Sign URL encoded query (see Section 3.4.4.1 DEFLATE Encoding of saml-bindings-2.0-os.pdf) + ctx := sp.SigningContext() + qs.Add("SigAlg", ctx.GetSignatureMethodIdentifier()) + var rawSignature []byte + //qs.Encode() sorts the keys (See https://golang.org/pkg/net/url/#Values.Encode). + //If RelayState parameter is present then RelayState parameter + //will be put first by Encode(). Hence encode them separately and concatenate. + //Signature string has to have parameters in the order - SAMLRequest=value&RelayState=value&SigAlg=value. + //(See Section 3.4.4.1 saml-bindings-2.0-os.pdf). 
+ var orderedParams = []string{"SAMLRequest", "RelayState", "SigAlg"} + + var paramValueMap = make(map[string]string) + paramValueMap["SAMLRequest"] = base64.StdEncoding.EncodeToString(buf.Bytes()) + if relayState != "" { + paramValueMap["RelayState"] = relayState + } + paramValueMap["SigAlg"] = ctx.GetSignatureMethodIdentifier() + + ss := "" + + for _, k := range orderedParams { + v, ok := paramValueMap[k] + if ok { + //Add the value after URL encoding. + u := url.Values{} + u.Add(k, v) + e := u.Encode() + if ss != "" { + ss += "&" + e + } else { + ss = e + } + } + } + + //Now generate the signature on the string of ordered parameters. + if rawSignature, err = ctx.SignString(ss); err != nil { + return "", fmt.Errorf("unable to sign query string of redirect URL: %v", err) + } + + // Now add base64 encoded Signature + qs.Add("Signature", base64.StdEncoding.EncodeToString(rawSignature)) + } + + //Here the parameters may appear in any order. + parsedUrl.RawQuery = qs.Encode() + return parsedUrl.String(), nil +} + +// signatureInputString constructs the string to be fed into the signature algorithm, as described +// in section 3.4.4.1 of +// https://www.oasis-open.org/committees/download.php/56779/sstc-saml-bindings-errata-2.0-wd-06.pdf +func signatureInputString(samlRequest, relayState, sigAlg string) string { + var params [][2]string + if relayState == "" { + params = [][2]string{{"SAMLRequest", samlRequest}, {"SigAlg", sigAlg}} + } else { + params = [][2]string{{"SAMLRequest", samlRequest}, {"RelayState", relayState}, {"SigAlg", sigAlg}} + } + + var buf bytes.Buffer + for _, kv := range params { + k, v := kv[0], kv[1] + if buf.Len() > 0 { + buf.WriteByte('&') + } + buf.WriteString(url.QueryEscape(k) + "=" + url.QueryEscape(v)) + } + return buf.String() +} diff --git a/vendor/github.com/mattermost/gosaml2/decode_logout_request.go b/vendor/github.com/mattermost/gosaml2/decode_logout_request.go new file mode 100644 index 00000000..57c7eb8f --- /dev/null +++ 
b/vendor/github.com/mattermost/gosaml2/decode_logout_request.go @@ -0,0 +1,85 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package saml2 + +import ( + "encoding/base64" + "fmt" + + dsig "github.com/russellhaering/goxmldsig" +) + +func (sp *SAMLServiceProvider) validateLogoutRequestAttributes(request *LogoutRequest) error { + if request.Destination != "" && request.Destination != sp.ServiceProviderSLOURL { + return ErrInvalidValue{ + Key: DestinationAttr, + Expected: sp.ServiceProviderSLOURL, + Actual: request.Destination, + } + } + + if request.Version != "2.0" { + return ErrInvalidValue{ + Reason: ReasonUnsupported, + Key: "SAML version", + Expected: "2.0", + Actual: request.Version, + } + } + + return nil +} + +func (sp *SAMLServiceProvider) ValidateEncodedLogoutRequestPOST(encodedRequest string) (*LogoutRequest, error) { + raw, err := base64.StdEncoding.DecodeString(encodedRequest) + if err != nil { + return nil, err + } + + // Parse the raw request - parseResponse is generic + doc, el, err := parseResponse(raw, sp.MaximumDecompressedBodySize) + if err != nil { + return nil, err + } + + var requestSignatureValidated bool + if !sp.SkipSignatureValidation { + el, err = sp.validateElementSignature(el) + if err == dsig.ErrMissingSignature { + // Unfortunately we just blew away our Response + el = doc.Root() + } else if err != nil { + return nil, err + } else if el == nil { + return nil, fmt.Errorf("missing 
transformed logout request") + } else { + requestSignatureValidated = true + } + } + + decodedRequest := &LogoutRequest{} + err = xmlUnmarshalElement(el, decodedRequest) + if err != nil { + return nil, fmt.Errorf("unable to unmarshal logout request: %v", err) + } + decodedRequest.SignatureValidated = requestSignatureValidated + + err = sp.ValidateDecodedLogoutRequest(decodedRequest) + if err != nil { + return nil, err + } + + return decodedRequest, nil +} diff --git a/vendor/github.com/mattermost/gosaml2/decode_response.go b/vendor/github.com/mattermost/gosaml2/decode_response.go new file mode 100644 index 00000000..a924322c --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/decode_response.go @@ -0,0 +1,478 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package saml2 + +import ( + "bytes" + "compress/flate" + "crypto/tls" + "crypto/x509" + "encoding/base64" + "fmt" + "io" + + "encoding/xml" + + "github.com/beevik/etree" + "github.com/mattermost/gosaml2/types" + rtvalidator "github.com/mattermost/xml-roundtrip-validator" + dsig "github.com/russellhaering/goxmldsig" + "github.com/russellhaering/goxmldsig/etreeutils" +) + +const ( + defaultMaxDecompressedResponseSize = 5 * 1024 * 1024 +) + +func (sp *SAMLServiceProvider) validationContext() *dsig.ValidationContext { + ctx := dsig.NewDefaultValidationContext(sp.IDPCertificateStore) + ctx.Clock = sp.Clock + return ctx +} + +// validateResponseAttributes validates a SAML Response's tag and attributes. It does +// not inspect child elements of the Response at all. +func (sp *SAMLServiceProvider) validateResponseAttributes(response *types.Response) error { + if response.Destination != "" && response.Destination != sp.AssertionConsumerServiceURL { + return ErrInvalidValue{ + Key: DestinationAttr, + Expected: sp.AssertionConsumerServiceURL, + Actual: response.Destination, + } + } + + if response.Version != "2.0" { + return ErrInvalidValue{ + Reason: ReasonUnsupported, + Key: "SAML version", + Expected: "2.0", + Actual: response.Version, + } + } + + return nil +} + +// validateLogoutResponseAttributes validates a SAML Response's tag and attributes. It does +// not inspect child elements of the Response at all. 
+func (sp *SAMLServiceProvider) validateLogoutResponseAttributes(response *types.LogoutResponse) error { + if response.Destination != "" && response.Destination != sp.ServiceProviderSLOURL { + return ErrInvalidValue{ + Key: DestinationAttr, + Expected: sp.ServiceProviderSLOURL, + Actual: response.Destination, + } + } + + if response.Version != "2.0" { + return ErrInvalidValue{ + Reason: ReasonUnsupported, + Key: "SAML version", + Expected: "2.0", + Actual: response.Version, + } + } + + return nil +} + +func xmlUnmarshalElement(el *etree.Element, obj interface{}) error { + doc := etree.NewDocument() + doc.SetRoot(el) + data, err := doc.WriteToBytes() + if err != nil { + return err + } + + err = xml.Unmarshal(data, obj) + if err != nil { + return err + } + return nil +} + +func (sp *SAMLServiceProvider) getDecryptCert() (*tls.Certificate, error) { + if sp.SPKeyStore == nil { + return nil, fmt.Errorf("no decryption certs available") + } + + //This is the tls.Certificate we'll use to decrypt any encrypted assertions + var decryptCert tls.Certificate + + switch crt := sp.SPKeyStore.(type) { + case dsig.TLSCertKeyStore: + // Get the tls.Certificate directly if possible + decryptCert = tls.Certificate(crt) + + default: + + //Otherwise, construct one from the results of GetKeyPair + pk, cert, err := sp.SPKeyStore.GetKeyPair() + if err != nil { + return nil, fmt.Errorf("error getting keypair: %v", err) + } + + decryptCert = tls.Certificate{ + Certificate: [][]byte{cert}, + PrivateKey: pk, + } + } + + if sp.ValidateEncryptionCert { + // Check Validity period of certificate + if len(decryptCert.Certificate) < 1 || len(decryptCert.Certificate[0]) < 1 { + return nil, fmt.Errorf("empty decryption cert") + } else if cert, err := x509.ParseCertificate(decryptCert.Certificate[0]); err != nil { + return nil, fmt.Errorf("invalid x509 decryption cert: %v", err) + } else { + now := sp.Clock.Now() + if now.Before(cert.NotBefore) || now.After(cert.NotAfter) { + return nil, 
fmt.Errorf("decryption cert is not valid at this time") + } + } + } + + return &decryptCert, nil +} + +func (sp *SAMLServiceProvider) decryptAssertions(el *etree.Element) error { + var decryptCert *tls.Certificate + + decryptAssertion := func(ctx etreeutils.NSContext, encryptedElement *etree.Element) error { + if encryptedElement.Parent() != el { + return fmt.Errorf("found encrypted assertion with unexpected parent element: %s", encryptedElement.Parent().Tag) + } + + detached, err := etreeutils.NSDetatch(ctx, encryptedElement) // make a detached copy + if err != nil { + return fmt.Errorf("unable to detach encrypted assertion: %v", err) + } + + encryptedAssertion := &types.EncryptedAssertion{} + err = xmlUnmarshalElement(detached, encryptedAssertion) + if err != nil { + return fmt.Errorf("unable to unmarshal encrypted assertion: %v", err) + } + + if decryptCert == nil { + decryptCert, err = sp.getDecryptCert() + if err != nil { + return fmt.Errorf("unable to get decryption certificate: %v", err) + } + } + + raw, derr := encryptedAssertion.DecryptBytes(decryptCert) + if derr != nil { + return fmt.Errorf("unable to decrypt encrypted assertion: %v", derr) + } + + doc, _, err := parseResponse(raw, sp.MaximumDecompressedBodySize) + if err != nil { + return fmt.Errorf("unable to create element from decrypted assertion bytes: %v", derr) + } + + // Replace the original encrypted assertion with the decrypted one. 
+ if el.RemoveChild(encryptedElement) == nil { + // Out of an abundance of caution, make sure removed worked + panic("unable to remove encrypted assertion") + } + + el.AddChild(doc.Root()) + return nil + } + + return etreeutils.NSFindIterate(el, SAMLAssertionNamespace, EncryptedAssertionTag, decryptAssertion) +} + +func (sp *SAMLServiceProvider) validateElementSignature(el *etree.Element) (*etree.Element, error) { + return sp.validationContext().Validate(el) +} + +func (sp *SAMLServiceProvider) validateAssertionSignatures(el *etree.Element) error { + signedAssertions := 0 + unsignedAssertions := 0 + validateAssertion := func(ctx etreeutils.NSContext, unverifiedAssertion *etree.Element) error { + parent := unverifiedAssertion.Parent() + if parent == nil { + return fmt.Errorf("parent is nil") + } + if parent != el { + return fmt.Errorf("found assertion with unexpected parent element: %s", unverifiedAssertion.Parent().Tag) + } + + detached, err := etreeutils.NSDetatch(ctx, unverifiedAssertion) // make a detached copy + if err != nil { + return fmt.Errorf("unable to detach unverified assertion: %v", err) + } + + assertion, err := sp.validationContext().Validate(detached) + if err == dsig.ErrMissingSignature { + unsignedAssertions++ + return nil + } else if err != nil { + return err + } + + // Replace the original unverified Assertion with the verified one. Note that + // if the Response is not signed, only signed Assertions (and not the parent Response) can be trusted. + if el.RemoveChild(unverifiedAssertion) == nil { + // Out of an abundance of caution, check to make sure an Assertion was actually + // removed. If it wasn't a programming error has occurred. 
+ panic("unable to remove assertion") + } + + el.AddChild(assertion) + signedAssertions++ + + return nil + } + + if err := etreeutils.NSFindIterate(el, SAMLAssertionNamespace, AssertionTag, validateAssertion); err != nil { + return err + } else if signedAssertions > 0 && unsignedAssertions > 0 { + return fmt.Errorf("invalid to have both signed and unsigned assertions") + } else if signedAssertions < 1 { + return dsig.ErrMissingSignature + } else { + return nil + } +} + +// ValidateEncodedResponse both decodes and validates, based on SP +// configuration, an encoded, signed response. It will also appropriately +// decrypt a response if the assertion was encrypted +func (sp *SAMLServiceProvider) ValidateEncodedResponse(encodedResponse string) (*types.Response, error) { + raw, err := base64.StdEncoding.DecodeString(encodedResponse) + if err != nil { + return nil, err + } + + // Parse the raw response + doc, el, err := parseResponse(raw, sp.MaximumDecompressedBodySize) + if err != nil { + return nil, err + } + + elAssertion, err := etreeutils.NSFindOne(el, SAMLAssertionNamespace, AssertionTag) + if err != nil { + return nil, err + } + elEncAssertion, err := etreeutils.NSFindOne(el, SAMLAssertionNamespace, EncryptedAssertionTag) + if err != nil { + return nil, err + } + // We verify that either one of assertion or encrypted assertion elements are present, + // but not both. + if (elAssertion == nil) == (elEncAssertion == nil) { + return nil, fmt.Errorf("found both or no assertion and encrypted assertion elements") + } + // And if a decryptCert is present, then it's only encrypted assertion elements. 
+ if sp.SPKeyStore != nil && elAssertion != nil { + return nil, fmt.Errorf("all assertions are not encrypted") + } + + var responseSignatureValidated bool + if !sp.SkipSignatureValidation { + el, err = sp.validateElementSignature(el) + if err == dsig.ErrMissingSignature { + // Unfortunately we just blew away our Response + el = doc.Root() + } else if err != nil { + return nil, err + } else if el == nil { + return nil, fmt.Errorf("missing transformed response") + } else { + responseSignatureValidated = true + } + } + + err = sp.decryptAssertions(el) + if err != nil { + return nil, err + } + + var assertionSignaturesValidated bool + if !sp.SkipSignatureValidation { + err = sp.validateAssertionSignatures(el) + if err == dsig.ErrMissingSignature { + if !responseSignatureValidated { + return nil, fmt.Errorf("response and/or assertions must be signed") + } + } else if err != nil { + return nil, err + } else { + assertionSignaturesValidated = true + } + } + + decodedResponse := &types.Response{} + err = xmlUnmarshalElement(el, decodedResponse) + if err != nil { + return nil, fmt.Errorf("unable to unmarshal response: %v", err) + } + decodedResponse.SignatureValidated = responseSignatureValidated + if assertionSignaturesValidated { + for idx := 0; idx < len(decodedResponse.Assertions); idx++ { + decodedResponse.Assertions[idx].SignatureValidated = true + } + } + + err = sp.Validate(decodedResponse) + if err != nil { + return nil, err + } + + return decodedResponse, nil +} + +// DecodeUnverifiedBaseResponse decodes several attributes from a SAML response for the purpose +// of determining how to validate the response. This is useful for Service Providers which +// expose a single Assertion Consumer Service URL but consume Responses from many IdPs. 
+func DecodeUnverifiedBaseResponse(encodedResponse string) (*types.UnverifiedBaseResponse, error) { + raw, err := base64.StdEncoding.DecodeString(encodedResponse) + if err != nil { + return nil, err + } + + var response *types.UnverifiedBaseResponse + + err = maybeDeflate(raw, defaultMaxDecompressedResponseSize, func(maybeXML []byte) error { + response = &types.UnverifiedBaseResponse{} + return xml.Unmarshal(maybeXML, response) + }) + if err != nil { + return nil, err + } + + return response, nil +} + +// maybeDeflate invokes the passed decoder over the passed data. If an error is +// returned, it then attempts to deflate the passed data before re-invoking +// the decoder over the deflated data. +func maybeDeflate(data []byte, maxSize int64, decoder func([]byte) error) error { + err := decoder(data) + if err == nil { + return nil + } + + // Default to 5MB max size + if maxSize == 0 { + maxSize = defaultMaxDecompressedResponseSize + } + + lr := io.LimitReader(flate.NewReader(bytes.NewReader(data)), maxSize+1) + + deflated, err := io.ReadAll(lr) + if err != nil { + return err + } + + if int64(len(deflated)) > maxSize { + return fmt.Errorf("deflated response exceeds maximum size of %d bytes", maxSize) + } + + return decoder(deflated) +} + +// parseResponse is a helper function that was refactored out so that the XML parsing behavior can be isolated and unit tested +func parseResponse(xml []byte, maxSize int64) (*etree.Document, *etree.Element, error) { + var doc *etree.Document + var rawXML []byte + + err := maybeDeflate(xml, maxSize, func(xml []byte) error { + doc = etree.NewDocument() + rawXML = xml + return doc.ReadFromBytes(xml) + }) + if err != nil { + return nil, nil, err + } + + el := doc.Root() + if el == nil { + return nil, nil, fmt.Errorf("unable to parse response") + } + + // Examine the response for attempts to exploit weaknesses in Go's encoding/xml + err = rtvalidator.Validate(bytes.NewReader(rawXML)) + if err != nil { + return nil, nil, err + } + + 
return doc, el, nil +} + +// DecodeUnverifiedLogoutResponse decodes several attributes from a SAML Logout response, without doing any verifications. +func DecodeUnverifiedLogoutResponse(encodedResponse string) (*types.LogoutResponse, error) { + raw, err := base64.StdEncoding.DecodeString(encodedResponse) + if err != nil { + return nil, err + } + + var response *types.LogoutResponse + + err = maybeDeflate(raw, defaultMaxDecompressedResponseSize, func(maybeXML []byte) error { + response = &types.LogoutResponse{} + return xml.Unmarshal(maybeXML, response) + }) + if err != nil { + return nil, err + } + + return response, nil +} + +func (sp *SAMLServiceProvider) ValidateEncodedLogoutResponsePOST(encodedResponse string) (*types.LogoutResponse, error) { + raw, err := base64.StdEncoding.DecodeString(encodedResponse) + if err != nil { + return nil, err + } + + // Parse the raw response + doc, el, err := parseResponse(raw, sp.MaximumDecompressedBodySize) + if err != nil { + return nil, err + } + + var responseSignatureValidated bool + if !sp.SkipSignatureValidation { + el, err = sp.validateElementSignature(el) + if err == dsig.ErrMissingSignature { + // Unfortunately we just blew away our Response + el = doc.Root() + } else if err != nil { + return nil, err + } else if el == nil { + return nil, fmt.Errorf("missing transformed logout response") + } else { + responseSignatureValidated = true + } + } + + decodedResponse := &types.LogoutResponse{} + err = xmlUnmarshalElement(el, decodedResponse) + if err != nil { + return nil, fmt.Errorf("unable to unmarshal logout response: %v", err) + } + decodedResponse.SignatureValidated = responseSignatureValidated + + err = sp.ValidateDecodedLogoutResponse(decodedResponse) + if err != nil { + return nil, err + } + + return decodedResponse, nil +} diff --git a/vendor/github.com/mattermost/gosaml2/logout_request.go b/vendor/github.com/mattermost/gosaml2/logout_request.go new file mode 100644 index 00000000..7dd4d35f --- /dev/null +++ 
b/vendor/github.com/mattermost/gosaml2/logout_request.go @@ -0,0 +1,37 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package saml2 + +import ( + "encoding/xml" + "github.com/mattermost/gosaml2/types" + "time" +) + +// LogoutRequest is the go struct representation of a logout request +type LogoutRequest struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:protocol LogoutRequest"` + ID string `xml:"ID,attr"` + Version string `xml:"Version,attr"` + //ProtocolBinding string `xml:",attr"` + + IssueInstant time.Time `xml:"IssueInstant,attr"` + + Destination string `xml:"Destination,attr"` + Issuer *types.Issuer `xml:"Issuer"` + + NameID *types.NameID `xml:"NameID"` + SignatureValidated bool `xml:"-"` // not read, not dumped +} diff --git a/vendor/github.com/mattermost/gosaml2/retrieve_assertion.go b/vendor/github.com/mattermost/gosaml2/retrieve_assertion.go new file mode 100644 index 00000000..af70aa6d --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/retrieve_assertion.go @@ -0,0 +1,111 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package saml2 + +import "fmt" + +//ErrMissingElement is the error type that indicates an element and/or attribute is +//missing. It provides a structured error that can be more appropriately acted +//upon. +type ErrMissingElement struct { + Tag, Attribute string +} + +type ErrVerification struct { + Cause error +} + +func (e ErrVerification) Error() string { + return fmt.Sprintf("error validating response: %s", e.Cause.Error()) +} + +//ErrMissingAssertion indicates that an appropriate assertion element could not +//be found in the SAML Response +var ( + ErrMissingAssertion = ErrMissingElement{Tag: AssertionTag} +) + +func (e ErrMissingElement) Error() string { + if e.Attribute != "" { + return fmt.Sprintf("missing %s attribute on %s element", e.Attribute, e.Tag) + } + return fmt.Sprintf("missing %s element", e.Tag) +} + +//RetrieveAssertionInfo takes an encoded response and returns the AssertionInfo +//contained, or an error message if an error has been encountered. 
+func (sp *SAMLServiceProvider) RetrieveAssertionInfo(encodedResponse string) (*AssertionInfo, error) { + assertionInfo := &AssertionInfo{ + Values: make(Values), + } + + response, err := sp.ValidateEncodedResponse(encodedResponse) + if err != nil { + return nil, ErrVerification{Cause: err} + } + + // TODO: Support multiple assertions + if len(response.Assertions) == 0 { + return nil, ErrMissingAssertion + } + + assertion := response.Assertions[0] + assertionInfo.Assertions = response.Assertions + assertionInfo.ResponseSignatureValidated = response.SignatureValidated + + warningInfo, err := sp.VerifyAssertionConditions(&assertion) + if err != nil { + return nil, err + } + + //Get the NameID + subject := assertion.Subject + if subject == nil { + return nil, ErrMissingElement{Tag: SubjectTag} + } + + nameID := subject.NameID + if nameID == nil { + return nil, ErrMissingElement{Tag: NameIdTag} + } + + assertionInfo.NameID = nameID.Value + + //Get the actual assertion attributes + attributeStatement := assertion.AttributeStatement + if attributeStatement == nil && !sp.AllowMissingAttributes { + return nil, ErrMissingElement{Tag: AttributeStatementTag} + } + + if attributeStatement != nil { + for _, attribute := range attributeStatement.Attributes { + assertionInfo.Values[attribute.Name] = attribute + } + } + + if assertion.AuthnStatement != nil { + if assertion.AuthnStatement.AuthnInstant != nil { + assertionInfo.AuthnInstant = assertion.AuthnStatement.AuthnInstant + } + if assertion.AuthnStatement.SessionNotOnOrAfter != nil { + assertionInfo.SessionNotOnOrAfter = assertion.AuthnStatement.SessionNotOnOrAfter + } + + assertionInfo.SessionIndex = assertion.AuthnStatement.SessionIndex + } + + assertionInfo.WarningInfo = warningInfo + return assertionInfo, nil +} diff --git a/vendor/github.com/mattermost/gosaml2/run_test.sh b/vendor/github.com/mattermost/gosaml2/run_test.sh new file mode 100644 index 00000000..cfe5b2ea --- /dev/null +++ 
b/vendor/github.com/mattermost/gosaml2/run_test.sh @@ -0,0 +1,12 @@ +#!/bin/bash +cd `dirname $0` +DIRS=`git grep -l 'func Test' | xargs dirname | sort -u` +for DIR in $DIRS +do + echo + echo "dir: $DIR" + echo "======================================" + pushd $DIR >/dev/null + go test -v || exit 1 + popd >/dev/null +done diff --git a/vendor/github.com/mattermost/gosaml2/saml.go b/vendor/github.com/mattermost/gosaml2/saml.go new file mode 100644 index 00000000..94434417 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/saml.go @@ -0,0 +1,291 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package saml2 + +import ( + "encoding/base64" + "sync" + "time" + + "github.com/mattermost/gosaml2/types" + dsig "github.com/russellhaering/goxmldsig" + dsigtypes "github.com/russellhaering/goxmldsig/types" +) + +type ErrSaml struct { + Message string + System error +} + +func (serr ErrSaml) Error() string { + if serr.Message != "" { + return serr.Message + } + return "SAML error" +} + +type SAMLServiceProvider struct { + IdentityProviderSSOURL string + IdentityProviderSSOBinding string + IdentityProviderSLOURL string + IdentityProviderSLOBinding string + IdentityProviderIssuer string + + AssertionConsumerServiceURL string + ServiceProviderSLOURL string + ServiceProviderIssuer string + + SignAuthnRequests bool + SignAuthnRequestsAlgorithm string + SignAuthnRequestsCanonicalizer dsig.Canonicalizer + + // RequestedAuthnContext allows service providers to require that the identity + // provider use specific authentication mechanisms. Leaving this unset will + // permit the identity provider to choose the auth method. To maximize compatibility + // with identity providers it is recommended to leave this unset. + RequestedAuthnContext *RequestedAuthnContext + AudienceURI string + IDPCertificateStore dsig.X509CertificateStore + SPKeyStore dsig.X509KeyStore // Required encryption key, default signing key + SPSigningKeyStore dsig.X509KeyStore // Optional signing key + NameIdFormat string + ValidateEncryptionCert bool + SkipSignatureValidation bool + AllowMissingAttributes bool + ScopingIDPProviderId string + ScopingIDPProviderName string + Clock *dsig.Clock + + // MaximumDecompressedBodySize is the maximum size to which a compressed + // SAML document will be decompressed. If a compresed document is exceeds + // this size during decompression an error will be returned. 
+ MaximumDecompressedBodySize int64 + + signingContextMu sync.RWMutex + signingContext *dsig.SigningContext +} + +// RequestedAuthnContext controls which authentication mechanisms are requested of +// the identity provider. It is generally sufficient to omit this and let the +// identity provider select an authentication mechansim. +type RequestedAuthnContext struct { + // The RequestedAuthnContext comparison policy to use. See the section 3.3.2.2.1 + // of the SAML 2.0 specification for details. Constants named AuthnPolicyMatch* + // contain standardized values. + Comparison string + + // Contexts will be passed as AuthnContextClassRefs. For example, to force password + // authentication on some identity providers, Contexts should have a value of + // []string{AuthnContextPasswordProtectedTransport}, and Comparison should have a + // value of AuthnPolicyMatchExact. + Contexts []string +} + +func (sp *SAMLServiceProvider) Metadata() (*types.EntityDescriptor, error) { + keyDescriptors := make([]types.KeyDescriptor, 0, 2) + if sp.GetSigningKey() != nil { + signingCertBytes, err := sp.GetSigningCertBytes() + if err != nil { + return nil, err + } + keyDescriptors = append(keyDescriptors, types.KeyDescriptor{ + Use: "signing", + KeyInfo: dsigtypes.KeyInfo{ + X509Data: dsigtypes.X509Data{ + X509Certificates: []dsigtypes.X509Certificate{{ + Data: base64.StdEncoding.EncodeToString(signingCertBytes), + }}, + }, + }, + }) + } + if sp.GetEncryptionKey() != nil { + encryptionCertBytes, err := sp.GetEncryptionCertBytes() + if err != nil { + return nil, err + } + keyDescriptors = append(keyDescriptors, types.KeyDescriptor{ + Use: "encryption", + KeyInfo: dsigtypes.KeyInfo{ + X509Data: dsigtypes.X509Data{ + X509Certificates: []dsigtypes.X509Certificate{{ + Data: base64.StdEncoding.EncodeToString(encryptionCertBytes), + }}, + }, + }, + EncryptionMethods: []types.EncryptionMethod{ + {Algorithm: types.MethodAES128GCM}, + {Algorithm: types.MethodAES192GCM}, + {Algorithm: 
types.MethodAES256GCM}, + {Algorithm: types.MethodAES128CBC}, + {Algorithm: types.MethodAES256CBC}, + }, + }) + } + return &types.EntityDescriptor{ + ValidUntil: time.Now().UTC().Add(time.Hour * 24 * 7), // 7 days + EntityID: sp.ServiceProviderIssuer, + SPSSODescriptor: &types.SPSSODescriptor{ + AuthnRequestsSigned: sp.SignAuthnRequests, + WantAssertionsSigned: !sp.SkipSignatureValidation, + ProtocolSupportEnumeration: SAMLProtocolNamespace, + KeyDescriptors: keyDescriptors, + AssertionConsumerServices: []types.IndexedEndpoint{{ + Binding: BindingHttpPost, + Location: sp.AssertionConsumerServiceURL, + Index: 1, + }}, + }, + }, nil +} + +func (sp *SAMLServiceProvider) MetadataWithSLO(validityHours int64) (*types.EntityDescriptor, error) { + signingCertBytes, err := sp.GetSigningCertBytes() + if err != nil { + return nil, err + } + encryptionCertBytes, err := sp.GetEncryptionCertBytes() + if err != nil { + return nil, err + } + + if validityHours <= 0 { + //By default let's keep it to 7 days. 
+ validityHours = int64(time.Hour * 24 * 7) + } + + return &types.EntityDescriptor{ + ValidUntil: time.Now().UTC().Add(time.Duration(validityHours)), // default 7 days + EntityID: sp.ServiceProviderIssuer, + SPSSODescriptor: &types.SPSSODescriptor{ + AuthnRequestsSigned: sp.SignAuthnRequests, + WantAssertionsSigned: !sp.SkipSignatureValidation, + ProtocolSupportEnumeration: SAMLProtocolNamespace, + KeyDescriptors: []types.KeyDescriptor{ + { + Use: "signing", + KeyInfo: dsigtypes.KeyInfo{ + X509Data: dsigtypes.X509Data{ + X509Certificates: []dsigtypes.X509Certificate{{ + Data: base64.StdEncoding.EncodeToString(signingCertBytes), + }}, + }, + }, + }, + { + Use: "encryption", + KeyInfo: dsigtypes.KeyInfo{ + X509Data: dsigtypes.X509Data{ + X509Certificates: []dsigtypes.X509Certificate{{ + Data: base64.StdEncoding.EncodeToString(encryptionCertBytes), + }}, + }, + }, + EncryptionMethods: []types.EncryptionMethod{ + {Algorithm: types.MethodAES128GCM, DigestMethod: nil}, + {Algorithm: types.MethodAES192GCM, DigestMethod: nil}, + {Algorithm: types.MethodAES256GCM, DigestMethod: nil}, + {Algorithm: types.MethodAES128CBC, DigestMethod: nil}, + {Algorithm: types.MethodAES256CBC, DigestMethod: nil}, + }, + }, + }, + AssertionConsumerServices: []types.IndexedEndpoint{{ + Binding: BindingHttpPost, + Location: sp.AssertionConsumerServiceURL, + Index: 1, + }}, + SingleLogoutServices: []types.Endpoint{{ + Binding: BindingHttpPost, + Location: sp.ServiceProviderSLOURL, + }}, + }, + }, nil +} + +func (sp *SAMLServiceProvider) GetEncryptionKey() dsig.X509KeyStore { + return sp.SPKeyStore +} + +func (sp *SAMLServiceProvider) GetSigningKey() dsig.X509KeyStore { + if sp.SPSigningKeyStore == nil { + return sp.GetEncryptionKey() // Default is signing key is same as encryption key + } + return sp.SPSigningKeyStore +} + +func (sp *SAMLServiceProvider) GetEncryptionCertBytes() ([]byte, error) { + if _, encryptionCert, err := sp.GetEncryptionKey().GetKeyPair(); err != nil { + return nil, 
ErrSaml{Message: "no SP encryption certificate", System: err} + } else if len(encryptionCert) < 1 { + return nil, ErrSaml{Message: "empty SP encryption certificate"} + } else { + return encryptionCert, nil + } +} + +func (sp *SAMLServiceProvider) GetSigningCertBytes() ([]byte, error) { + if _, signingCert, err := sp.GetSigningKey().GetKeyPair(); err != nil { + return nil, ErrSaml{Message: "no SP signing certificate", System: err} + } else if len(signingCert) < 1 { + return nil, ErrSaml{Message: "empty SP signing certificate"} + } else { + return signingCert, nil + } +} + +func (sp *SAMLServiceProvider) SigningContext() *dsig.SigningContext { + sp.signingContextMu.RLock() + signingContext := sp.signingContext + sp.signingContextMu.RUnlock() + + if signingContext != nil { + return signingContext + } + + sp.signingContextMu.Lock() + defer sp.signingContextMu.Unlock() + + sp.signingContext = dsig.NewDefaultSigningContext(sp.GetSigningKey()) + sp.signingContext.SetSignatureMethod(sp.SignAuthnRequestsAlgorithm) + if sp.SignAuthnRequestsCanonicalizer != nil { + sp.signingContext.Canonicalizer = sp.SignAuthnRequestsCanonicalizer + } + + return sp.signingContext +} + +type ProxyRestriction struct { + Count int + Audience []string +} + +type WarningInfo struct { + OneTimeUse bool + ProxyRestriction *ProxyRestriction + NotInAudience bool + InvalidTime bool +} + +type AssertionInfo struct { + NameID string + Values Values + WarningInfo *WarningInfo + SessionIndex string + AuthnInstant *time.Time + SessionNotOnOrAfter *time.Time + Assertions []types.Assertion + ResponseSignatureValidated bool +} diff --git a/vendor/github.com/mattermost/gosaml2/test_constants.go b/vendor/github.com/mattermost/gosaml2/test_constants.go new file mode 100644 index 00000000..871e9713 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/test_constants.go @@ -0,0 +1,418 @@ +// Copyright 2016 Russell Haering et al. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package saml2 + +var idpCertificate = ` +-----BEGIN CERTIFICATE----- +MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+a +-----END CERTIFICATE----- +` + +const rawResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebesimon` + +const manInTheMiddledResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7ijTqmVmDy7ssK+rvmJaCQ6AQaFaXz+HIN/r6O37B0eQ=G09fAYXGDLK+/jAekHsNL0RLo40Xm6+VwXmUj0IDIrvIIv/mJU5VD6ylOLnPezLDBVY9BJst1YCz+8krdvmQ8Stkd6qiN2bN/5KpCdika111YGpeNdMmg/E57ZG3S895hTNJQYOfCwhPFUtQuXLkspOaw81pcqOTr+bVSofJ8uQP7cVQa/ANxbjKAj0fhAuxAvZfiqPms5Stv4sNGpzULUDJl87CoEleHExGmpTsI7Qt3EvGToPMZXPHF4MGvuC0Z2ZD4iI6Pr7xk98t54PJtAX2qJu1tZqBJmL0Qcq5spl9W3yC1tAZuDeFLm1C4/T9crO2Q5WILP/tkw/yJ+ZttQ==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC 
+qRnqQ+TccSu/B6uONFsDEngGcXSKfB+ahttp://www.okta.com/exk5zt0r12Edi4rD20h7zln6sheEO2JBdanrT5mZtJZ192tGHavuBpCFHQsJFVg=dHh6TWbnjtImyrfjPTX5QzE/6Vm/HsRWVvWWlvFAddf/CvhO4Kc5j8C7hvQoYMLhYuZMFFSReGysuDy5IscOJwTGhhcvb238qHSGGs6q8OUBCsmLSDAbIaGA++LV/tkUZ2ridGIi0yT81UOl1oT1batlHsK3eMyxkpnFmvBzIm4tGTzRkOPpYRLeiM9bxbKI+DM/623DCXyBCLYBzJo1O6QE02aLajwRMi/vmiV4LSiGlFcY9TtDCafdVJRv0tIQ25BQoT4feuHdr6S8xOSpGgRYH5ECamVOt4e079XdEkVUiSzQokiUkgDlTXEyerPLOVsOk4PW5nRs86sXIiGL5w==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiH9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.com` + +const alteredReferenceURIResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7ijTqmVmDy7ssK+rvmJaCQ6AQaFaXz+HIN/r6O37B0eQ=G09fAYXGDLK+/jAekHsNL0RLo40Xm6+VwXmUj0IDIrvIIv/mJU5VD6ylOLnPezLDBVY9BJst1YCz+8krdvmQ8Stkd6qiN2bN/5KpCdika111YGpeNdMmg/E57ZG3S895hTNJQYOfCwhPFUtQuXLkspOaw81pcqOTr+bVSofJ8uQP7cVQa/ANxbjKAj0fhAuxAvZfiqPms5Stv4sNGpzULUDJl87CoEleHExGmpTsI7Qt3EvGToPMZXPHF4MGvuC0Z2ZD4iI6Pr7xk98t54PJtAX2qJu1tZqBJmL0Qcq5spl9W3yC1tAZuDeFLm1C4/T9crO2Q5WILP/tkw/yJ+ZttQ==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC 
+qRnqQ+TccSu/B6uONFsDEngGcXSKfB+ahttp://www.okta.com/exk5zt0r12Edi4rD20h7zln6sheEO2JBdanrT5mZtJZ192tGHavuBpCFHQsJFVg=dHh6TWbnjtImyrfjPTX5QzE/6Vm/HsRWVvWWlvFAddf/CvhO4Kc5j8C7hvQoYMLhYuZMFFSReGysuDy5IscOJwTGhhcvb238qHSGGs6q8OUBCsmLSDAbIaGA++LV/tkUZ2ridGIi0yT81UOl1oT1batlHsK3eMyxkpnFmvBzIm4tGTzRkOPpYRLeiM9bxbKI+DM/623DCXyBCLYBzJo1O6QE02aLajwRMi/vmiV4LSiGlFcY9TtDCafdVJRv0tIQ25BQoT4feuHdr6S8xOSpGgRYH5ECamVOt4e079XdEkVUiSzQokiUkgDlTXEyerPLOVsOk4PW5nRs86sXIiGL5w==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiH9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.com` + +const alteredSignedInfoResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7ijTqmVmDy7ssK+rvmJaCQ6AQaFaXz+HIN/r6O37B0eQ=G09fAYXGDLK+/jAekHsNL0RLo40Xm6+VwXmUj0IDIrvIIv/mJU5VD6ylOLnPezLDBVY9BJst1YCz+8krdvmQ8Stkd6qiN2bN/5KpCdika111YGpeNdMmg/E57ZG3S895hTNJQYOfCwhPFUtQuXLkspOaw81pcqOTr+bVSofJ8uQP7cVQa/ANxbjKAj0fhAuxAvZfiqPms5Stv4sNGpzULUDJl87CoEleHExGmpTsI7Qt3EvGToPMZXPHF4MGvuC0Z2ZD4iI6Pr7xk98t54PJtAX2qJu1tZqBJmL0Qcq5spl9W3yC1tAZuDeFLm1C4/T9crO2Q5WILP/tkw/yJ+ZttQ==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEV +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC 
+qRnqQ+TccSu/B6uONFsDEngGcXSKfB+ahttp://www.okta.com/exk5zt0r12Edi4rD20h7zln6sheEO2JBdanrT5mZtJZ192tGHavuBpCFHQsJFVg=dHh6TWbnjtImyrfjPTX5QzE/6Vm/HsRWVvWWlvFAddf/CvhO4Kc5j8C7hvQoYMLhYuZMFFSReGysuDy5IscOJwTGhhcvb238qHSGGs6q8OUBCsmLSDAbIaGA++LV/tkUZ2ridGIi0yT81UOl1oT1batlHsK3eMyxkpnFmvBzIm4tGTzRkOPpYRLeiM9bxbKI+DM/623DCXyBCLYBzJo1O6QE02aLajwRMi/vmiV4LSiGlFcY9TtDCafdVJRv0tIQ25BQoT4feuHdr6S8xOSpGgRYH5ECamVOt4e079XdEkVUiSzQokiUkgDlTXEyerPLOVsOk4PW5nRs86sXIiGL5w==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiH9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.com` + +const alteredRecipientResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const alteredSubjectConfirmationMethodResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const alteredDestinationResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const alteredVersionResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const missingIDResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7mj+xyS5DtKVNbbFq4caWhGcrirqNzv7mIHNzHQH/f60=GA1URoMOE5EFfkHYimGXm7Ecph/m0s135VyF9Wut6NSpuZdQ2crM1IslvKCRjkE09rZgagQQMAThUcOFuX35dZPz9J4Ihpt1juhfGv1AV8I8jiOKFETj65MiPabDEi8+P6YWf4qNujAJXHKJIa/MFXBqoKR/imLQT8eu1nhVBQGYqWwZePddfXO2JYk2ce7mtnyMT0dUVb+o+tlEDYa7ri9fj4JL/z1XX7yrbVZxn2mdKPJtSSP8uHNOWSM6j1vp4oK+KSDviBfiVLlVA58noz5GyFtp642h+LV2quKbncMFfnfB1kfHLK/xaz9UaDBy+bHK4oGzSpVhZqcOzzliKA==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC 
+qRnqQ+TccSu/B6uONFsDEngGcXSKfB+ahttp://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const assertionInfoResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const assertionInfoModifiedAudienceResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com124urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const assertionInfoOneTimeUseResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const assertionInfoProxyRestrictionResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const assertionInfoProxyRestrictionNoCountResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const assertionInfoProxyRestrictionNoAudienceResponse = ` 
+http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ +LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebe.simon@scaleft.com` + +const exampleBase64 = `<?xml version="1.0" encoding="UTF-8"?><saml2p:Response 
xmlns:saml2p="urn:oasis:names:tc:SAML:2.0:protocol" Destination="http://localhost:8080/v1/_saml_callback" ID="id103532804647787975381325" InResponseTo="_8699c655-c482-451a-9b7f-61668f140b47" IssueInstant="2016-03-16T01:02:57.682Z" Version="2.0"><saml2:Issuer xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion" Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity">http://www.okta.com/exk5zt0r12Edi4rD20h7</saml2:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/><ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/><ds:Reference URI="#id103532804647787975381325"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/></ds:Transforms><ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/><ds:DigestValue>npTAl6kraksBlCRlunbyD6nICTcfsDaHjPXVxoDPrw0=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>SbB03fI1TVstJ7q1B6Xx8YRGkDpNQ8ar4zF3p3aikkcq8TTS0eR28FoQuN1HX72Pn2rccE4ONizYNS6/rvrlyV/SlXXmC9mhTLRPeJz5mrxjqO5QYD1Y3IzmnkfQ6KutukkcGOJEpa3vYesf5JKRS+0WGRtzOS4wJF18oGIZ+ba8P6gxmMryA8xB/eJgxpfrmUbBjPHLSfljebh88EiNID088XuSGyd+3DmpW5B52DQB8cAyyOBRkRRcqLFIgxibmvtIiequU0a2ncojpu0+4ojl+4wDCWdGOEyqtJoTQXC4sKRaU79FK5IFfEiYMqviFD0oau4sPj0gnFCEF5Fl4w==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG
A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU
MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW
DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE
BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV
BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ
KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5
QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4
2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN
uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+
LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ
TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ
DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a
cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP
pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC
qRnqQ+TccSu/B6uONFsDEngGcXSKfB+a</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><saml2p:Status xmlns:saml2p="urn:oasis:names:tc:SAML:2.0:protocol"><saml2p:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"/></saml2p:Status><saml2:Assertion xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion" ID="id1035328046526588900089424" IssueInstant="2016-03-16T01:02:57.682Z" Version="2.0"><saml2:Issuer Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity" xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion">http://www.okta.com/exk5zt0r12Edi4rD20h7</saml2:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/><ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/><ds:Reference URI="#id1035328046526588900089424"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/></ds:Transforms><ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/><ds:DigestValue>No1VyQlk8Xif4FiJ+haViwEQySIzBa14lGy0coCn0c8=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>VSV8Vw47q7n/XZwaQOPWQeKI5ZA69fnGZyEFhex4xuaIfC+LOYnfd8q8qcZsm1M6kv47H/dR6YXRIMjPKXZeyX/MKcmGPCadqWFT7EWFvzuO/uy/AB/CL5ZCQiY9H/aOhDysO8glse1S+Y2K0CwvsoRwMfFiO2XOYhVOsngUSkCBdLIB6Oq4f+ZsK0rw/E79n9QUd8owDq3dVC18SFYYdcIVDhQppglyuBEZfu2tG06gD9jls7ZE8vjcMfHmhuHtxlH3ovNLB35NFO/VrCNdFqmD76GnEA98foiJxCX8vzNHF4rPUFXAEdiS4OdQAxb7jNNVoKVYuadunLygysZGSg==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG
A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU
MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW
DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE
BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV
BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ
KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5
QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4
2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN
uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+
LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ
TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ
DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a
cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP
pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC
qRnqQ+TccSu/B6uONFsDEngGcXSKfB+a</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><saml2:Subject xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion"><saml2:NameID Format="urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified">russell.haering@scaleft.com</saml2:NameID><saml2:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer"><saml2:SubjectConfirmationData InResponseTo="_8699c655-c482-451a-9b7f-61668f140b47" NotOnOrAfter="2016-03-16T01:07:57.682Z" Recipient="http://localhost:8080/v1/_saml_callback"/></saml2:SubjectConfirmation></saml2:Subject><saml2:Conditions NotBefore="2016-03-16T00:57:57.682Z" NotOnOrAfter="2016-03-16T01:07:57.682Z" xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion"><saml2:AudienceRestriction><saml2:Audience>123</saml2:Audience></saml2:AudienceRestriction></saml2:Conditions><saml2:AuthnStatement AuthnInstant="2016-03-16T01:02:57.682Z" SessionIndex="_8699c655-c482-451a-9b7f-61668f140b47" xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion"><saml2:AuthnContext><saml2:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport</saml2:AuthnContextClassRef></saml2:AuthnContext></saml2:AuthnStatement></saml2:Assertion></saml2p:Response>` + +const exampleBase64_2 = `<?xml version="1.0" encoding="UTF-8"?><saml2p:Response xmlns:saml2p="urn:oasis:names:tc:SAML:2.0:protocol" Destination="http://localhost:8080/v1/_saml_callback" ID="id2128248929510670834559185" InResponseTo="_da213df8-ef95-41d0-b9bf-71d271735cd7" IssueInstant="2016-03-28T16:38:18.565Z" Version="2.0" xmlns:xs="http://www.w3.org/2001/XMLSchema"><saml2:Issuer xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion" Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity">http://www.okta.com/exk5zt0r12Edi4rD20h7</saml2:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/><ds:SignatureMethod 
Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/><ds:Reference URI="#id2128248929510670834559185"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"><ec:InclusiveNamespaces xmlns:ec="http://www.w3.org/2001/10/xml-exc-c14n#" PrefixList="xs"/></ds:Transform></ds:Transforms><ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/><ds:DigestValue>WvgW/JfP4mjU+/1wtyX06E9EGxYNsoCU+rbSZnAvj2s=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>GA1URoMOE5EFfkHYimGXm7Ecph/m0s135VyF9Wut6NSpuZdQ2crM1IslvKCRjkE09rZgagQQMAThUcOFuX35dZPz9J4Ihpt1juhfGv1AV8I8jiOKFETj65MiPabDEi8+P6YWf4qNujAJXHKJIa/MFXBqoKR/imLQT8eu1nhVBQGYqWwZePddfXO2JYk2ce7mtnyMT0dUVb+o+tlEDYa7ri9fj4JL/z1XX7yrbVZxn2mdKPJtSSP8uHNOWSM6j1vp4oK+KSDviBfiVLlVA58noz5GyFtp642h+LV2quKbncMFfnfB1kfHLK/xaz9UaDBy+bHK4oGzSpVhZqcOzzliKA==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG
A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU
MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW
DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE
BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV
BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ
KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5
QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4
2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN
uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+
LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ
TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ
DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a
cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP
pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC
qRnqQ+TccSu/B6uONFsDEngGcXSKfB+a</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><saml2p:Status xmlns:saml2p="urn:oasis:names:tc:SAML:2.0:protocol"><saml2p:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"/></saml2p:Status><saml2:Assertion xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion" ID="id21282489295776782126468319" IssueInstant="2016-03-28T16:38:18.565Z" Version="2.0" xmlns:xs="http://www.w3.org/2001/XMLSchema"><saml2:Issuer Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity" xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion">http://www.okta.com/exk5zt0r12Edi4rD20h7</saml2:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/><ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/><ds:Reference URI="#id21282489295776782126468319"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"><ec:InclusiveNamespaces xmlns:ec="http://www.w3.org/2001/10/xml-exc-c14n#" PrefixList="xs"/></ds:Transform></ds:Transforms><ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/><ds:DigestValue>FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG
A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU
MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW
DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE
BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV
BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ
KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5
QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4
2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN
uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+
LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ
TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ
DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a
cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP
pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC
qRnqQ+TccSu/B6uONFsDEngGcXSKfB+a</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><saml2:Subject xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion"><saml2:NameID Format="urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress">phoebe.simon@scaleft.com</saml2:NameID><saml2:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer"><saml2:SubjectConfirmationData InResponseTo="_da213df8-ef95-41d0-b9bf-71d271735cd7" NotOnOrAfter="2016-03-28T16:43:18.565Z" Recipient="http://localhost:8080/v1/_saml_callback"/></saml2:SubjectConfirmation></saml2:Subject><saml2:Conditions NotBefore="2016-03-28T16:33:18.565Z" NotOnOrAfter="2016-03-28T16:43:18.565Z" xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion"><saml2:AudienceRestriction><saml2:Audience>123</saml2:Audience></saml2:AudienceRestriction></saml2:Conditions><saml2:AuthnStatement AuthnInstant="2016-03-28T16:38:18.565Z" SessionIndex="_da213df8-ef95-41d0-b9bf-71d271735cd7" xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion"><saml2:AuthnContext><saml2:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport</saml2:AuthnContextClassRef></saml2:AuthnContext></saml2:AuthnStatement><saml2:AttributeStatement xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion"><saml2:Attribute Name="FirstName" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified"><saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:string">Phoebe</saml2:AttributeValue></saml2:Attribute><saml2:Attribute Name="LastName" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified"><saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:string">Simon</saml2:AttributeValue></saml2:Attribute><saml2:Attribute Name="Email" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified"><saml2:AttributeValue 
xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:string">phoebe.simon@scaleft.com</saml2:AttributeValue></saml2:Attribute><saml2:Attribute Name="Login" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified"><saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:string">phoebe.simon@scaleft.com</saml2:AttributeValue></saml2:Attribute></saml2:AttributeStatement></saml2:Assertion></saml2p:Response>` + +const commentInjectionAttackResponse = ` +http://www.okta.com/exk5zt0r12Edi4rD20h7http://www.okta.com/exk5zt0r12Edi4rD20h7FsWGCBC+t/LaVkUKUvRQpzyZTmlxUzw4R9FOzXPPJRw=hS50WgYs/cn3uxmhrza/0/0QW3H7bwdjPZ2hQmG7IeSd7awTOghBqdrjvaPfQ7tRW+UK6ewMgIBVKG6jV3qYAWeW2U70hMb7hE9qJqBKyYyimmhVWULx1HB2YmlU1wmispywoPlXQ6gj0iWaL2RFI83vUp7X50eZ6dELqoJVZpzQI065Tt0TG7UuKUW1flYsbiS9NaXnuw+mcrBW25ZA9F5CLePHki01ZzUw+XtNmKthEb7SR30mzPoj08Dji22daYvGu82IR01wIZPoQJPCGMT6y2xC/pQPqGljAg/vUa+gaYgaMaAVYxhk/hfgMUBlOeKACBaGTmygab1Nz5KvPg==MIIDpDCCAoygAwIBAgIGAVLIBhAwMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYDVQQGEwJVUzETMBEG +A1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEU +MBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi0xMTY4MDcxHDAaBgkqhkiG9w0BCQEW +DWluZm9Ab2t0YS5jb20wHhcNMTYwMjA5MjE1MjA2WhcNMjYwMjA5MjE1MzA2WjCBkjELMAkGA1UE +BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNV +BAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtMTE2ODA3MRwwGgYJ +KoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +mtjBOZ8MmhUyi8cGk4dUY6Fj1MFDt/q3FFiaQpLzu3/q5lRVUNUBbAtqQWwY10dzfZguHOuvA5p5 +QyiVDvUhe+XkVwN2R2WfArQJRTPnIcOaHrxqQf3o5cCIG21ZtysFHJSo8clPSOe+0VsoRgcJ1aF4 +2rODwgqRRZdO9Wh3502XlJ799DJQ23IC7XasKEsGKzJqhlRrfd/FyIuZT0sFHDKRz5snSJhm9gpN +uQlCmk7ONZ1sXqtt+nBIfWIqeoYQubPW7pT5GTc7wouWq4TCjHJiK9k2HiyNxW0E3JX08swEZi2+ 
+LVDjgLzNc4lwjSYIj3AOtPZs8s606oBdIBni4wIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBMxSkJ +TxkXxsoKNW0awJNpWRbU81QpheMFfENIzLam4Itc/5kSZAaSy/9e2QKfo4jBo/MMbCq2vM9TyeJQ +DJpRaioUTd2lGh4TLUxAxCxtUk/pascL+3Nn936LFmUCLxaxnbeGzPOXAhscCtU1H0nFsXRnKx5a +cPXYSKFZZZktieSkww2Oi8dg2DYaQhGQMSFMVqgVfwEu4bvCRBvdSiNXdWGCZQmFVzBZZ/9rOLzP +pvTFTPnpkavJm81FLlUhiE/oFgKlCDLWDknSpXAI0uZGERcwPca6xvIMh86LjQKjbVci9FYDStXC +qRnqQ+TccSu/B6uONFsDEngGcXSKfB+aphoebe.simon@scaleft.com.evil.com123urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransportPhoebeSimonphoebe.simon@scaleft.comphoebesimon` + +const doubleColonAssertionInjectionAttackResponse = ` + +https://app.onelogin.com/saml/metadata/634027 + +<::Assertion xmlns="urn:oasis:names:tc:SAML:2.0:assertion" xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" Version="2.0" ID="x" IssueInstant="2017-03-08T07:53:39Z">https://app.onelogin.com/saml/metadata/634027gd5V090n/m4JRrtpo5WgrwPyyy0=what@launchdarkly.com{audience}urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport 
+https://app.onelogin.com/saml/metadata/634027gd5V090n/m4JRrtpo5WgrwPyyy0=SLzvdNM+1R1+3XsXpC+/RIvb5L4Lhy7Eb7caPG2CLMPYhzbKLAwIiT7/0fEMO/xL7rdIgEShbcU9iu5PX4hGYBhirsFIZvdHytns5+JKHnlVBmHm4TsSU1z+dGMXBa//L0KFSrvdgBUpsr5vs50SuYnnVp61VN+zCLMqO221CQfP95QyMcSQ+fiyq4GOmWLwQy1m1+NV3U8zlapp6FIH5stW/dp4OqpRdafV96rVwmmR4yeUw7VAzbJuMrPgkXO9nUbHeMUTgQxkQ4ThzG5jt6fT+Ro1NOYS4zpVtzqlQwGzqWxQVRLEqXIf500/Qi0NuFQOW42ZAUiXDgdLENTVGA==MIIEJjCCAw6gAwIBAgIUOHrykO4ce1TbjvGgXXVVnR4NsqMwDQYJKoZIhvcNAQEFBQAwXTELMAkGA1UEBhMCVVMxFTATBgNVBAoMDExhdW5jaERhcmtseTEVMBMGA1UECwwMT25lTG9naW4gSWRQMSAwHgYDVQQDDBdPbmVMb2dpbiBBY2NvdW50IDEwMjEyNzAeFw0xNzAzMDYwMjQ2NTNaFw0yMjAzMDcwMjQ2NTNaMF0xCzAJBgNVBAYTAlVTMRUwEwYDVQQKDAxMYXVuY2hEYXJrbHkxFTATBgNVBAsMDE9uZUxvZ2luIElkUDEgMB4GA1UEAwwXT25lTG9naW4gQWNjb3VudCAxMDIxMjcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCaJ02AnJe5vq+zzkmrIHhRy8V/UxJogbJGEJW6nqrEmO7Q4sXO7dLIKxGccCEz0KAavGKWzSX9uhVvKpazpD4bW80wPQIgFxN3CjiA3qlYIfhhh4emSZo2AnaTuG4BPVGFNPx0jxXGAhh/3xkpIsqARJFPB6njT2+MwFctm3fockx3Yp4e1xoUD8qQR0f/8oq1LjrYd2Vlckmmw7qrzSqS8POHW/I1jx9Y/vAjTPWDKXmbmLcTe3188PDrthSyoBuaAGBRVTP9WTuYMh4kGvmfX6sNvIDGejUcUCq6IObRr4xLSZiGy5uoyqsQc9agAhQm+26Gpq0R3NSvN91JdbZHAgMBAAGjgd0wgdowDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUnbxBsHgNVq3OSXEuG5EkR0Jd1UswgZoGA1UdIwSBkjCBj4AUnbxBsHgNVq3OSXEuG5EkR0Jd1UuhYaRfMF0xCzAJBgNVBAYTAlVTMRUwEwYDVQQKDAxMYXVuY2hEYXJrbHkxFTATBgNVBAsMDE9uZUxvZ2luIElkUDEgMB4GA1UEAwwXT25lTG9naW4gQWNjb3VudCAxMDIxMjeCFDh68pDuHHtU247xoF11VZ0eDbKjMA4GA1UdDwEB/wQEAwIHgDANBgkqhkiG9w0BAQUFAAOCAQEAL/6j2qpMCrnolwKT7mfPEpA6btbtl0R0t6zSwYUVU9T3PK0/P3LKXvbjSySov0E4R9d5qlOcyj5CbYiuqAO2aON3xy82s0dN3FHRiO6kcjoRPwVIIF0S8x7tpzcPKa42zSPfBqMRw4ezUEzTijFriepkSWST1Btr3QeK2Cxhr0fC1xmw/YK82BV0/oVRslGL27ro+v3/dNY0A0r32Xe2+THomrY/YaZaDCPCjHo8dlxrX3D/mPfoiiKSkm2mGagQXT0giTHVo3oIq+u+KdrBcQn65EBcjfFKDIeFCdiVmO0xPl9mmWskVRLy2/wpuDIp6hnAphl9lj5DY48eBsrEXQ==arun@launchdarkly.com{audience}urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport + +` diff --git 
a/vendor/github.com/mattermost/gosaml2/types/encrypted_assertion.go b/vendor/github.com/mattermost/gosaml2/types/encrypted_assertion.go new file mode 100644 index 00000000..150e505e --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/types/encrypted_assertion.go @@ -0,0 +1,97 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "bytes" + "crypto/cipher" + "crypto/tls" + "encoding/base64" + "encoding/xml" + "fmt" +) + +type EncryptedAssertion struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion EncryptedAssertion"` + EncryptionMethod EncryptionMethod `xml:"EncryptedData>EncryptionMethod"` + EncryptedKey EncryptedKey `xml:"EncryptedData>KeyInfo>EncryptedKey"` + DetEncryptedKey EncryptedKey `xml:"EncryptedKey"` // detached EncryptedKey element + CipherValue string `xml:"EncryptedData>CipherData>CipherValue"` +} + +func (ea *EncryptedAssertion) DecryptBytes(cert *tls.Certificate) ([]byte, error) { + data, err := base64.StdEncoding.DecodeString(ea.CipherValue) + if err != nil { + return nil, err + } + + // EncryptedKey must include CipherValue. EncryptedKey may be part of EncryptedData. + ek := &ea.EncryptedKey + if ek.CipherValue == "" { + // Use detached EncryptedKey element (sibling of EncryptedData). 
See:
	// https://www.w3.org/TR/2002/REC-xmlenc-core-20021210/Overview.html#sec-Extensions-to-KeyInfo
		ek = &ea.DetEncryptedKey
	}
	// Recover the symmetric content-encryption key using the SP's private key.
	k, err := ek.DecryptSymmetricKey(cert)
	if err != nil {
		return nil, fmt.Errorf("cannot decrypt, error retrieving private key: %s", err)
	}

	switch ea.EncryptionMethod.Algorithm {
	case MethodAES128GCM, MethodAES192GCM, MethodAES256GCM:
		c, err := cipher.NewGCM(k)
		if err != nil {
			return nil, fmt.Errorf("cannot create AES-GCM: %s", err)
		}

		// Guard: the ciphertext is attacker-supplied; a truncated value would
		// make the nonce slice below panic instead of returning an error.
		if len(data) < c.NonceSize() {
			return nil, fmt.Errorf("ciphertext too short for AES-GCM nonce")
		}
		nonce, data := data[:c.NonceSize()], data[c.NonceSize():]
		plainText, err := c.Open(nil, nonce, data, nil)
		if err != nil {
			return nil, fmt.Errorf("cannot open AES-GCM: %s", err)
		}
		return plainText, nil
	case MethodAES128CBC, MethodAES256CBC, MethodTripleDESCBC:
		// Guard: CBC input must hold at least one IV block and the remainder
		// must be block-aligned, or the slicing / CryptBlocks below panics.
		if len(data) < k.BlockSize() || (len(data)-k.BlockSize())%k.BlockSize() != 0 {
			return nil, fmt.Errorf("invalid CBC ciphertext length %d", len(data))
		}
		nonce, data := data[:k.BlockSize()], data[k.BlockSize():]
		c := cipher.NewCBCDecrypter(k, nonce)
		c.CryptBlocks(data, data)

		// Remove zero bytes
		data = bytes.TrimRight(data, "\x00")

		// Calculate index to remove based on padding. Reject padding values
		// that would index outside the plaintext (malformed/forged input
		// previously caused an out-of-range panic here).
		if len(data) == 0 {
			return nil, fmt.Errorf("empty plaintext after CBC decryption")
		}
		padLength := data[len(data)-1]
		lastGoodIndex := len(data) - int(padLength)
		if lastGoodIndex < 0 {
			return nil, fmt.Errorf("invalid CBC padding length %d", padLength)
		}
		return data[:lastGoodIndex], nil
	default:
		return nil, fmt.Errorf("unknown symmetric encryption method %#v", ea.EncryptionMethod.Algorithm)
	}
}

// Decrypt decrypts and unmarshals the EncryptedAssertion.
+func (ea *EncryptedAssertion) Decrypt(cert *tls.Certificate) (*Assertion, error) { + plaintext, err := ea.DecryptBytes(cert) + if err != nil { + return nil, fmt.Errorf("Error decrypting assertion: %v", err) + } + + assertion := &Assertion{} + + err = xml.Unmarshal(plaintext, assertion) + if err != nil { + return nil, fmt.Errorf("Error unmarshaling assertion: %v", err) + } + + return assertion, nil +} diff --git a/vendor/github.com/mattermost/gosaml2/types/encrypted_key.go b/vendor/github.com/mattermost/gosaml2/types/encrypted_key.go new file mode 100644 index 00000000..098386c3 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/types/encrypted_key.go @@ -0,0 +1,196 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "bytes" + "crypto/aes" + "crypto/cipher" + "crypto/des" + "crypto/rand" + "crypto/rsa" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "crypto/tls" + "encoding/base64" + "encoding/hex" + "fmt" + "hash" + "strings" +) + +//EncryptedKey contains the decryption key data from the saml2 core and xmlenc +//standards. +type EncryptedKey struct { + // EncryptionMethod string `xml:"EncryptionMethod>Algorithm"` + X509Data string `xml:"KeyInfo>X509Data>X509Certificate"` + CipherValue string `xml:"CipherData>CipherValue"` + EncryptionMethod EncryptionMethod +} + +//EncryptionMethod specifies the type of encryption that was used. 
+type EncryptionMethod struct { + Algorithm string `xml:",attr,omitempty"` + //Digest method is present for algorithms like RSA-OAEP. + //See https://www.w3.org/TR/xmlenc-core1/. + //To convey the digest methods an entity supports, + //DigestMethod in extensions element is used. + //See http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-metadata-algsupport.html. + DigestMethod *DigestMethod `xml:",omitempty"` +} + +//DigestMethod is a digest type specification +type DigestMethod struct { + Algorithm string `xml:",attr,omitempty"` +} + +//Well-known public-key encryption methods +const ( + MethodRSAOAEP = "http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p" + MethodRSAOAEP2 = "http://www.w3.org/2009/xmlenc11#rsa-oaep" + MethodRSAv1_5 = "http://www.w3.org/2001/04/xmlenc#rsa-1_5" +) + +//Well-known private key encryption methods +const ( + MethodAES128GCM = "http://www.w3.org/2009/xmlenc11#aes128-gcm" + MethodAES192GCM = "http://www.w3.org/2009/xmlenc11#aes192-gcm" + MethodAES256GCM = "http://www.w3.org/2009/xmlenc11#aes256-gcm" + MethodAES128CBC = "http://www.w3.org/2001/04/xmlenc#aes128-cbc" + MethodAES256CBC = "http://www.w3.org/2001/04/xmlenc#aes256-cbc" + MethodTripleDESCBC = "http://www.w3.org/2001/04/xmlenc#tripledes-cbc" +) + +//Well-known hash methods +const ( + MethodSHA1 = "http://www.w3.org/2000/09/xmldsig#sha1" + MethodSHA256 = "http://www.w3.org/2000/09/xmldsig#sha256" + MethodSHA512 = "http://www.w3.org/2000/09/xmldsig#sha512" +) + +//SHA-1 is commonly used for certificate fingerprints (openssl -fingerprint and ADFS thumbprint). +//SHA-1 is sufficient for our purposes here (error message). 
+func debugKeyFp(keyBytes []byte) string { + if len(keyBytes) < 1 { + return "" + } + hashFunc := sha1.New() + hashFunc.Write(keyBytes) + sum := strings.ToLower(hex.EncodeToString(hashFunc.Sum(nil))) + var ret string + for idx := 0; idx+1 < len(sum); idx += 2 { + if idx == 0 { + ret += sum[idx : idx+2] + } else { + ret += ":" + sum[idx:idx+2] + } + } + return ret +} + +//DecryptSymmetricKey returns the private key contained in the EncryptedKey document +func (ek *EncryptedKey) DecryptSymmetricKey(cert *tls.Certificate) (cipher.Block, error) { + if len(cert.Certificate) < 1 { + return nil, fmt.Errorf("decryption tls.Certificate has no public certs attached") + } + + // The EncryptedKey may or may not include X509Data (certificate). + // If included, the EncryptedKey certificate: + // - is FYI only (fail if it does not match the SP certificate) + // - is NOT used to decrypt CipherData + if ek.X509Data != "" { + if encCert, err := base64.StdEncoding.DecodeString(ek.X509Data); err != nil { + return nil, fmt.Errorf("error decoding EncryptedKey certificate: %v", err) + } else if !bytes.Equal(cert.Certificate[0], encCert) { + return nil, fmt.Errorf("key decryption attempted with mismatched cert, SP cert(%.11s), assertion cert(%.11s)", + debugKeyFp(cert.Certificate[0]), debugKeyFp(encCert)) + } + } + + cipherText, err := base64.StdEncoding.DecodeString(ek.CipherValue) + if err != nil { + return nil, err + } + + switch pk := cert.PrivateKey.(type) { + case *rsa.PrivateKey: + var h hash.Hash + + if ek.EncryptionMethod.DigestMethod == nil { + //if digest method is not present lets set default method to SHA1. + //Digest method is used by methods like RSA-OAEP. 
+ h = sha1.New() + } else { + switch ek.EncryptionMethod.DigestMethod.Algorithm { + case "", MethodSHA1: + h = sha1.New() // default + case MethodSHA256: + h = sha256.New() + case MethodSHA512: + h = sha512.New() + default: + return nil, fmt.Errorf("unsupported digest algorithm: %v", + ek.EncryptionMethod.DigestMethod.Algorithm) + } + } + + switch ek.EncryptionMethod.Algorithm { + case "": + return nil, fmt.Errorf("missing encryption algorithm") + case MethodRSAOAEP, MethodRSAOAEP2: + pt, err := rsa.DecryptOAEP(h, rand.Reader, pk, cipherText, nil) + if err != nil { + return nil, fmt.Errorf("rsa internal error: %v", err) + } + + b, err := aes.NewCipher(pt) + if err != nil { + return nil, err + } + + return b, nil + case MethodRSAv1_5: + pt, err := rsa.DecryptPKCS1v15(rand.Reader, pk, cipherText) + if err != nil { + return nil, fmt.Errorf("rsa internal error: %v", err) + } + + //From https://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf the xml encryption + //methods to be supported are from http://www.w3.org/2001/04/xmlenc#Element. + //https://www.w3.org/TR/2002/REC-xmlenc-core-20021210/Overview.html#Element. + //https://www.w3.org/TR/2002/REC-xmlenc-core-20021210/#sec-Algorithms + //Sec 5.4 Key Transport: + //The RSA v1.5 Key Transport algorithm given below are those used in conjunction with TRIPLEDES + //Please also see https://www.w3.org/TR/xmlenc-core/#sec-Algorithms and + //https://www.w3.org/TR/xmlenc-core/#rsav15note. + b, err := des.NewTripleDESCipher(pt) + if err != nil { + return nil, err + } + + // FIXME: The version we had previously in our fork, AES seems more secure from my Googling. 
+ // b, err := aes.NewCipher(pt) + // if err != nil { + // return nil, err + // } + + return b, nil + default: + return nil, fmt.Errorf("unsupported encryption algorithm: %s", ek.EncryptionMethod.Algorithm) + } + } + return nil, fmt.Errorf("no cipher for decoding symmetric key") +} diff --git a/vendor/github.com/mattermost/gosaml2/types/metadata.go b/vendor/github.com/mattermost/gosaml2/types/metadata.go new file mode 100644 index 00000000..84e07006 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/types/metadata.go @@ -0,0 +1,102 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package types + +import ( + "encoding/xml" + "time" + + dsigtypes "github.com/russellhaering/goxmldsig/types" +) + +type EntityDescriptor struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata EntityDescriptor"` + ValidUntil time.Time `xml:"validUntil,attr"` + // SAML 2.0 8.3.6 Entity Identifier could be used to represent issuer + EntityID string `xml:"entityID,attr"` + SPSSODescriptor *SPSSODescriptor `xml:"SPSSODescriptor,omitempty"` + IDPSSODescriptor *IDPSSODescriptor `xml:"IDPSSODescriptor,omitempty"` + Extensions *Extensions `xml:"Extensions,omitempty"` +} + +type Endpoint struct { + Binding string `xml:"Binding,attr"` + Location string `xml:"Location,attr"` + ResponseLocation string `xml:"ResponseLocation,attr,omitempty"` +} + +type IndexedEndpoint struct { + Binding string `xml:"Binding,attr"` + Location string `xml:"Location,attr"` + Index int `xml:"index,attr"` +} + +type SPSSODescriptor struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata SPSSODescriptor"` + AuthnRequestsSigned bool `xml:"AuthnRequestsSigned,attr"` + WantAssertionsSigned bool `xml:"WantAssertionsSigned,attr"` + ProtocolSupportEnumeration string `xml:"protocolSupportEnumeration,attr"` + KeyDescriptors []KeyDescriptor `xml:"KeyDescriptor"` + SingleLogoutServices []Endpoint `xml:"SingleLogoutService"` + NameIDFormats []string `xml:"NameIDFormat"` + AssertionConsumerServices []IndexedEndpoint `xml:"AssertionConsumerService"` + Extensions *Extensions `xml:"Extensions,omitempty"` +} + +type IDPSSODescriptor struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata IDPSSODescriptor"` + WantAuthnRequestsSigned bool `xml:"WantAuthnRequestsSigned,attr"` + KeyDescriptors []KeyDescriptor `xml:"KeyDescriptor"` + NameIDFormats []NameIDFormat `xml:"NameIDFormat"` + SingleSignOnServices []SingleSignOnService `xml:"SingleSignOnService"` + SingleLogoutServices []SingleLogoutService `xml:"SingleLogoutService"` + Attributes []Attribute `xml:"Attribute"` + 
Extensions *Extensions `xml:"Extensions,omitempty"` +} + +type KeyDescriptor struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata KeyDescriptor"` + Use string `xml:"use,attr"` + KeyInfo dsigtypes.KeyInfo `xml:"KeyInfo"` + EncryptionMethods []EncryptionMethod `xml:"EncryptionMethod"` +} + +type NameIDFormat struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata NameIDFormat"` + Value string `xml:",chardata"` +} + +type SingleSignOnService struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata SingleSignOnService"` + Binding string `xml:"Binding,attr"` + Location string `xml:"Location,attr"` +} + +type SingleLogoutService struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata SingleLogoutService"` + Binding string `xml:"Binding,attr"` + Location string `xml:"Location,attr"` +} + +type SigningMethod struct { + Algorithm string `xml:",attr"` + MinKeySize string `xml:"MinKeySize,attr,omitempty"` + MaxKeySize string `xml:"MaxKeySize,attr,omitempty"` +} + +type Extensions struct { + DigestMethod *DigestMethod `xml:",omitempty"` + SigningMethod *SigningMethod `xml:",omitempty"` +} diff --git a/vendor/github.com/mattermost/gosaml2/types/response.go b/vendor/github.com/mattermost/gosaml2/types/response.go new file mode 100644 index 00000000..70a730e7 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/types/response.go @@ -0,0 +1,187 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "encoding/xml" + "time" +) + +// UnverifiedBaseResponse extracts several basic attributes of a SAML Response +// which may be useful in deciding how to validate the Response. An UnverifiedBaseResponse +// is parsed by this library prior to any validation of the Response, so the +// values it contains may have been supplied by an attacker and should not be +// trusted as authoritative from the IdP. +type UnverifiedBaseResponse struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:protocol Response"` + ID string `xml:"ID,attr"` + InResponseTo string `xml:"InResponseTo,attr"` + Destination string `xml:"Destination,attr"` + Version string `xml:"Version,attr"` + Issuer *Issuer `xml:"Issuer"` +} + +type Response struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:protocol Response"` + ID string `xml:"ID,attr"` + InResponseTo string `xml:"InResponseTo,attr"` + Destination string `xml:"Destination,attr"` + Version string `xml:"Version,attr"` + IssueInstant time.Time `xml:"IssueInstant,attr"` + Status *Status `xml:"Status"` + Issuer *Issuer `xml:"Issuer"` + Assertions []Assertion `xml:"Assertion"` + EncryptedAssertions []EncryptedAssertion `xml:"EncryptedAssertion"` + SignatureValidated bool `xml:"-"` // not read, not dumped +} + +type LogoutResponse struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:protocol LogoutResponse"` + ID string `xml:"ID,attr"` + InResponseTo string `xml:"InResponseTo,attr"` + Destination string `xml:"Destination,attr"` + Version string `xml:"Version,attr"` + IssueInstant time.Time `xml:"IssueInstant,attr"` + Status *Status `xml:"Status"` + Issuer *Issuer `xml:"Issuer"` + SignatureValidated bool `xml:"-"` // not read, not dumped +} + +type Status struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:protocol Status"` + StatusCode *StatusCode `xml:"StatusCode"` +} + +type 
StatusCode struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:protocol StatusCode"` + Value string `xml:"Value,attr"` +} + +type Issuer struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion Issuer"` + Value string `xml:",chardata"` +} + +type Signature struct { + SignatureDocument []byte `xml:",innerxml"` +} + +type Assertion struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion Assertion"` + Version string `xml:"Version,attr"` + ID string `xml:"ID,attr"` + IssueInstant time.Time `xml:"IssueInstant,attr"` + Issuer *Issuer `xml:"Issuer"` + Signature *Signature `xml:"Signature"` + Subject *Subject `xml:"Subject"` + Conditions *Conditions `xml:"Conditions"` + AttributeStatement *AttributeStatement `xml:"AttributeStatement"` + AuthnStatement *AuthnStatement `xml:"AuthnStatement"` + SignatureValidated bool `xml:"-"` // not read, not dumped +} + +type Subject struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion Subject"` + NameID *NameID `xml:"NameID"` + SubjectConfirmation *SubjectConfirmation `xml:"SubjectConfirmation"` +} + +type AuthnContext struct { + XMLName xml.Name `xml:urn:oasis:names:tc:SAML:2.0:assertion AuthnContext"` + AuthnContextClassRef *AuthnContextClassRef `xml:"AuthnContextClassRef"` +} + +type AuthnContextClassRef struct { + XMLName xml.Name `xml:urn:oasis:names:tc:SAML:2.0:assertion AuthnContextClassRef"` + Value string `xml:",chardata"` +} + +type NameID struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion NameID"` + Value string `xml:",chardata"` +} + +type SubjectConfirmation struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion SubjectConfirmation"` + Method string `xml:"Method,attr"` + SubjectConfirmationData *SubjectConfirmationData `xml:"SubjectConfirmationData"` +} + +type SubjectConfirmationData struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion SubjectConfirmationData"` + NotOnOrAfter string `xml:"NotOnOrAfter,attr"` + 
Recipient string `xml:"Recipient,attr"` + InResponseTo string `xml:"InResponseTo,attr"` +} + +type Conditions struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion Conditions"` + NotBefore string `xml:"NotBefore,attr"` + NotOnOrAfter string `xml:"NotOnOrAfter,attr"` + AudienceRestrictions []AudienceRestriction `xml:"AudienceRestriction"` + OneTimeUse *OneTimeUse `xml:"OneTimeUse"` + ProxyRestriction *ProxyRestriction `xml:"ProxyRestriction"` +} + +type AudienceRestriction struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion AudienceRestriction"` + Audiences []Audience `xml:"Audience"` +} + +type Audience struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion Audience"` + Value string `xml:",chardata"` +} + +type OneTimeUse struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion OneTimeUse"` +} + +type ProxyRestriction struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion ProxyRestriction"` + Count int `xml:"Count,attr"` + Audience []Audience `xml:"Audience"` +} + +type AttributeStatement struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion AttributeStatement"` + Attributes []Attribute `xml:"Attribute"` +} + +type Attribute struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion Attribute"` + FriendlyName string `xml:"FriendlyName,attr"` + Name string `xml:"Name,attr"` + NameFormat string `xml:"NameFormat,attr"` + Values []AttributeValue `xml:"AttributeValue"` +} + +type AttributeValue struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion AttributeValue"` + Type string `xml:"xsi:type,attr"` + Value string `xml:",chardata"` +} + +type AuthnStatement struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:assertion AuthnStatement"` + //Section 4.1.4.2 - https://docs.oasis-open.org/security/saml/v2.0/saml-profiles-2.0-os.pdf + //If the identity provider supports the Single Logout profile, defined in Section 4.4 + //, any 
such authentication statements MUST include a SessionIndex attribute to enable + //per-session logout requests by the service provider. + SessionIndex string `xml:"SessionIndex,attr,omitempty"` + AuthnInstant *time.Time `xml:"AuthnInstant,attr,omitempty"` + SessionNotOnOrAfter *time.Time `xml:"SessionNotOnOrAfter,attr,omitempty"` + AuthnContext *AuthnContext `xml:"AuthnContext"` +} diff --git a/vendor/github.com/mattermost/gosaml2/uuid/uuid.go b/vendor/github.com/mattermost/gosaml2/uuid/uuid.go new file mode 100644 index 00000000..8dcb7a5c --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/uuid/uuid.go @@ -0,0 +1,41 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package uuid + +// relevant bits from https://github.com/abneptis/GoUUID/blob/master/uuid.go + +import ( + "crypto/rand" + "fmt" +) + +type UUID [16]byte + +// NewV4 returns random generated UUID. 
+func NewV4() *UUID { + u := &UUID{} + _, err := rand.Read(u[:16]) + if err != nil { + panic(err) + } + + u[8] = (u[8] | 0x80) & 0xBf + u[6] = (u[6] | 0x40) & 0x4f + return u +} + +func (u *UUID) String() string { + return fmt.Sprintf("%x-%x-%x-%x-%x", u[:4], u[4:6], u[6:8], u[8:10], u[10:]) +} diff --git a/vendor/github.com/mattermost/gosaml2/validate.go b/vendor/github.com/mattermost/gosaml2/validate.go new file mode 100644 index 00000000..14a0f0dd --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/validate.go @@ -0,0 +1,309 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package saml2 + +import ( + "fmt" + "time" + + "github.com/mattermost/gosaml2/types" +) + +//ErrParsing indicates that the value present in an assertion could not be +//parsed. It can be inspected for the specific tag name, the contents, and the +//intended type. +type ErrParsing struct { + Tag, Value, Type string +} + +func (ep ErrParsing) Error() string { + return fmt.Sprintf("Error parsing %s tag value as type %s", ep.Tag, ep.Value) +} + +//Oft-used messages +const ( + ReasonUnsupported = "Unsupported" + ReasonExpired = "Expired" +) + +//ErrInvalidValue indicates that the expected value did not match the received +//value. 
+type ErrInvalidValue struct { + Key, Expected, Actual string + Reason string +} + +func (e ErrInvalidValue) Error() string { + if e.Reason == "" { + e.Reason = "Unrecognized" + } + return fmt.Sprintf("%s %s value, Expected: %s, Actual: %s", e.Reason, e.Key, e.Expected, e.Actual) +} + +//Well-known methods of subject confirmation +const ( + SubjMethodBearer = "urn:oasis:names:tc:SAML:2.0:cm:bearer" +) + +//VerifyAssertionConditions inspects an assertion element and makes sure that +//all SAML2 contracts are upheld. +func (sp *SAMLServiceProvider) VerifyAssertionConditions(assertion *types.Assertion) (*WarningInfo, error) { + warningInfo := &WarningInfo{} + now := sp.Clock.Now() + + conditions := assertion.Conditions + if conditions == nil { + return nil, ErrMissingElement{Tag: ConditionsTag} + } + + if conditions.NotBefore == "" { + return nil, ErrMissingElement{Tag: ConditionsTag, Attribute: NotBeforeAttr} + } + + notBefore, err := time.Parse(time.RFC3339, conditions.NotBefore) + if err != nil { + return nil, ErrParsing{Tag: NotBeforeAttr, Value: conditions.NotBefore, Type: "time.RFC3339"} + } + + if now.Before(notBefore) { + warningInfo.InvalidTime = true + } + + if conditions.NotOnOrAfter == "" { + return nil, ErrMissingElement{Tag: ConditionsTag, Attribute: NotOnOrAfterAttr} + } + + notOnOrAfter, err := time.Parse(time.RFC3339, conditions.NotOnOrAfter) + if err != nil { + return nil, ErrParsing{Tag: NotOnOrAfterAttr, Value: conditions.NotOnOrAfter, Type: "time.RFC3339"} + } + + if now.After(notOnOrAfter) { + warningInfo.InvalidTime = true + } + + for _, audienceRestriction := range conditions.AudienceRestrictions { + matched := false + + for _, audience := range audienceRestriction.Audiences { + if audience.Value == sp.AudienceURI { + matched = true + break + } + } + + if !matched { + warningInfo.NotInAudience = true + break + } + } + + if conditions.OneTimeUse != nil { + warningInfo.OneTimeUse = true + } + + proxyRestriction := conditions.ProxyRestriction + if 
proxyRestriction != nil { + proxyRestrictionInfo := &ProxyRestriction{ + Count: proxyRestriction.Count, + Audience: []string{}, + } + + for _, audience := range proxyRestriction.Audience { + proxyRestrictionInfo.Audience = append(proxyRestrictionInfo.Audience, audience.Value) + } + + warningInfo.ProxyRestriction = proxyRestrictionInfo + } + + return warningInfo, nil +} + +//Validate ensures that the assertion passed is valid for the current Service +//Provider. +func (sp *SAMLServiceProvider) Validate(response *types.Response) error { + err := sp.validateResponseAttributes(response) + if err != nil { + return err + } + + if len(response.Assertions) == 0 { + return ErrMissingAssertion + } + + issuer := response.Issuer + if issuer == nil { + // FIXME?: SAML Core 2.0 Section 3.2.2 has Response.Issuer as [Optional] + return ErrMissingElement{Tag: IssuerTag} + } + + if sp.IdentityProviderIssuer != "" && response.Issuer.Value != sp.IdentityProviderIssuer { + return ErrInvalidValue{ + Key: IssuerTag, + Expected: sp.IdentityProviderIssuer, + Actual: response.Issuer.Value, + } + } + + status := response.Status + if status == nil { + return ErrMissingElement{Tag: StatusTag} + } + + statusCode := status.StatusCode + if statusCode == nil { + return ErrMissingElement{Tag: StatusCodeTag} + } + + if statusCode.Value != StatusCodeSuccess { + return ErrInvalidValue{ + Key: StatusCodeTag, + Expected: StatusCodeSuccess, + Actual: statusCode.Value, + } + } + + for _, assertion := range response.Assertions { + issuer = assertion.Issuer + if issuer == nil { + return ErrMissingElement{Tag: IssuerTag} + } + if sp.IdentityProviderIssuer != "" && assertion.Issuer.Value != sp.IdentityProviderIssuer { + return ErrInvalidValue{ + Key: IssuerTag, + Expected: sp.IdentityProviderIssuer, + Actual: issuer.Value, + } + } + + subject := assertion.Subject + if subject == nil { + return ErrMissingElement{Tag: SubjectTag} + } + + subjectConfirmation := subject.SubjectConfirmation + if 
subjectConfirmation == nil { + return ErrMissingElement{Tag: SubjectConfirmationTag} + } + + if subjectConfirmation.Method != SubjMethodBearer { + return ErrInvalidValue{ + Reason: ReasonUnsupported, + Key: SubjectConfirmationTag, + Expected: SubjMethodBearer, + Actual: subjectConfirmation.Method, + } + } + + subjectConfirmationData := subjectConfirmation.SubjectConfirmationData + if subjectConfirmationData == nil { + return ErrMissingElement{Tag: SubjectConfirmationDataTag} + } + + if subjectConfirmationData.Recipient != sp.AssertionConsumerServiceURL { + return ErrInvalidValue{ + Key: RecipientAttr, + Expected: sp.AssertionConsumerServiceURL, + Actual: subjectConfirmationData.Recipient, + } + } + + if subjectConfirmationData.NotOnOrAfter == "" { + return ErrMissingElement{Tag: SubjectConfirmationDataTag, Attribute: NotOnOrAfterAttr} + } + + notOnOrAfter, err := time.Parse(time.RFC3339, subjectConfirmationData.NotOnOrAfter) + if err != nil { + return ErrParsing{Tag: NotOnOrAfterAttr, Value: subjectConfirmationData.NotOnOrAfter, Type: "time.RFC3339"} + } + + now := sp.Clock.Now() + if now.After(notOnOrAfter) { + return ErrInvalidValue{ + Reason: ReasonExpired, + Key: NotOnOrAfterAttr, + Expected: now.Format(time.RFC3339), + Actual: subjectConfirmationData.NotOnOrAfter, + } + } + + } + + return nil +} + +func (sp *SAMLServiceProvider) ValidateDecodedLogoutResponse(response *types.LogoutResponse) error { + err := sp.validateLogoutResponseAttributes(response) + if err != nil { + return err + } + + issuer := response.Issuer + if issuer == nil { + // FIXME?: SAML Core 2.0 Section 3.2.2 has Response.Issuer as [Optional] + return ErrMissingElement{Tag: IssuerTag} + } + + if sp.IdentityProviderIssuer != "" && response.Issuer.Value != sp.IdentityProviderIssuer { + return ErrInvalidValue{ + Key: IssuerTag, + Expected: sp.IdentityProviderIssuer, + Actual: response.Issuer.Value, + } + } + + status := response.Status + if status == nil { + return ErrMissingElement{Tag: 
StatusTag} + } + + statusCode := status.StatusCode + if statusCode == nil { + return ErrMissingElement{Tag: StatusCodeTag} + } + + if statusCode.Value != StatusCodeSuccess { + return ErrInvalidValue{ + Key: StatusCodeTag, + Expected: StatusCodeSuccess, + Actual: statusCode.Value, + } + } + + return nil +} + +func (sp *SAMLServiceProvider) ValidateDecodedLogoutRequest(request *LogoutRequest) error { + err := sp.validateLogoutRequestAttributes(request) + if err != nil { + return err + } + + issuer := request.Issuer + if issuer == nil { + // FIXME?: SAML Core 2.0 Section 3.2.2 has Response.Issuer as [Optional] + return ErrMissingElement{Tag: IssuerTag} + } + + if sp.IdentityProviderIssuer != "" && request.Issuer.Value != sp.IdentityProviderIssuer { + return ErrInvalidValue{ + Key: IssuerTag, + Expected: sp.IdentityProviderIssuer, + Actual: request.Issuer.Value, + } + } + + return nil +} diff --git a/vendor/github.com/mattermost/gosaml2/xml_constants.go b/vendor/github.com/mattermost/gosaml2/xml_constants.go new file mode 100644 index 00000000..de36b477 --- /dev/null +++ b/vendor/github.com/mattermost/gosaml2/xml_constants.go @@ -0,0 +1,74 @@ +// Copyright 2016 Russell Haering et al. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package saml2 + +const ( + ResponseTag = "Response" + AssertionTag = "Assertion" + EncryptedAssertionTag = "EncryptedAssertion" + SubjectTag = "Subject" + NameIdTag = "NameID" + SubjectConfirmationTag = "SubjectConfirmation" + SubjectConfirmationDataTag = "SubjectConfirmationData" + AttributeStatementTag = "AttributeStatement" + AttributeValueTag = "AttributeValue" + ConditionsTag = "Conditions" + AudienceRestrictionTag = "AudienceRestriction" + AudienceTag = "Audience" + OneTimeUseTag = "OneTimeUse" + ProxyRestrictionTag = "ProxyRestriction" + IssuerTag = "Issuer" + StatusTag = "Status" + StatusCodeTag = "StatusCode" +) + +const ( + DestinationAttr = "Destination" + VersionAttr = "Version" + IdAttr = "ID" + MethodAttr = "Method" + RecipientAttr = "Recipient" + NameAttr = "Name" + NotBeforeAttr = "NotBefore" + NotOnOrAfterAttr = "NotOnOrAfter" + CountAttr = "Count" +) + +const ( + NameIdFormatPersistent = "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent" + NameIdFormatTransient = "urn:oasis:names:tc:SAML:2.0:nameid-format:transient" + NameIdFormatEmailAddress = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress" + NameIdFormatUnspecified = "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified" + NameIdFormatX509SubjectName = "urn:oasis:names:tc:SAML:1.1:nameid-format:x509SubjectName" + + AuthnContextPasswordProtectedTransport = "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport" + + AuthnPolicyMatchExact = "exact" + AuthnPolicyMatchMinimum = "minimum" + AuthnPolicyMatchMaximum = "maximum" + AuthnPolicyMatchBetter = "better" + + StatusCodeSuccess = "urn:oasis:names:tc:SAML:2.0:status:Success" + StatusCodePartialLogout = "urn:oasis:names:tc:SAML:2.0:status:PartialLogout" + StatusCodeUnknownPrincipal = "urn:oasis:names:tc:SAML:2.0:status:UnknownPrincipal" + + BindingHttpPost = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" + BindingHttpRedirect = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" +) + +const ( + SAMLAssertionNamespace 
= "urn:oasis:names:tc:SAML:2.0:assertion" + SAMLProtocolNamespace = "urn:oasis:names:tc:SAML:2.0:protocol" +) diff --git a/vendor/github.com/mattermost/ldap/.gitignore b/vendor/github.com/mattermost/ldap/.gitignore new file mode 100644 index 00000000..e69de29b diff --git a/vendor/github.com/mattermost/ldap/.travis.yml b/vendor/github.com/mattermost/ldap/.travis.yml new file mode 100644 index 00000000..f2538fd5 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/.travis.yml @@ -0,0 +1,32 @@ +sudo: false +language: go +go: + - "1.5.x" + - "1.6.x" + - "1.7.x" + - "1.8.x" + - "1.9.x" + - "1.10.x" + - "1.11.x" + - "1.12.x" + - "1.13.x" + - tip + +git: + depth: 1 + +matrix: + fast_finish: true + allow_failures: + - go: tip +go_import_path: github.com/go-ldap/ldap +install: + - go get github.com/go-asn1-ber/asn1-ber + - go get code.google.com/p/go.tools/cmd/cover || go get golang.org/x/tools/cmd/cover + - go get github.com/golang/lint/golint || go get golang.org/x/lint/golint || true + - go build -v ./... +script: + - make test + - make fmt + - make vet + - make lint diff --git a/vendor/github.com/mattermost/ldap/CONTRIBUTING.md b/vendor/github.com/mattermost/ldap/CONTRIBUTING.md new file mode 100644 index 00000000..a7885231 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/CONTRIBUTING.md @@ -0,0 +1,12 @@ +# Contribution Guidelines + +We welcome contribution and improvements. 
+ +## Guiding Principles + +To begin with here is a draft from an email exchange: + + * take compatibility seriously (our semvers, compatibility with older go versions, etc) + * don't tag untested code for release + * beware of baking in implicit behavior based on other libraries/tools choices + * be as high-fidelity as possible in plumbing through LDAP data (don't mask errors or reduce power of someone using the library) diff --git a/vendor/github.com/mattermost/ldap/LICENSE b/vendor/github.com/mattermost/ldap/LICENSE new file mode 100644 index 00000000..6c0ed4b3 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2011-2015 Michael Mitton (mmitton@gmail.com) +Portions copyright (c) 2015-2016 go-ldap Authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/mattermost/ldap/Makefile b/vendor/github.com/mattermost/ldap/Makefile new file mode 100644 index 00000000..c4966472 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/Makefile @@ -0,0 +1,82 @@ +.PHONY: default install build test quicktest fmt vet lint + +# List of all release tags "supported" by our current Go version +# E.g. ":go1.1:go1.2:go1.3:go1.4:go1.5:go1.6:go1.7:go1.8:go1.9:go1.10:go1.11:go1.12:" +GO_RELEASE_TAGS := $(shell go list -f ':{{join (context.ReleaseTags) ":"}}:' runtime) + +# Only use the `-race` flag on newer versions of Go (version 1.3 and newer) +ifeq (,$(findstring :go1.3:,$(GO_RELEASE_TAGS))) + RACE_FLAG := +else + RACE_FLAG := -race -cpu 1,2,4 +endif + +# Run `go vet` on Go 1.12 and newer. For Go 1.5-1.11, use `go tool vet` +ifneq (,$(findstring :go1.12:,$(GO_RELEASE_TAGS))) + GO_VET := go vet \ + -atomic \ + -bool \ + -copylocks \ + -nilfunc \ + -printf \ + -rangeloops \ + -unreachable \ + -unsafeptr \ + -unusedresult \ + . +else ifneq (,$(findstring :go1.5:,$(GO_RELEASE_TAGS))) + GO_VET := go tool vet \ + -atomic \ + -bool \ + -copylocks \ + -nilfunc \ + -printf \ + -shadow \ + -rangeloops \ + -unreachable \ + -unsafeptr \ + -unusedresult \ + . +else + GO_VET := @echo "go vet skipped -- not supported on this version of Go" +endif + +default: fmt vet lint build quicktest + +install: + go get -t -v ./... + +build: + go build -v ./... + +test: + go test -v $(RACE_FLAG) -cover ./... + +quicktest: + go test ./... + +# Capture output and force failure when there is non-empty output +fmt: + @echo gofmt -l . + @OUTPUT=`gofmt -l . 2>&1`; \ + if [ "$$OUTPUT" ]; then \ + echo "gofmt must be run on the following files:"; \ + echo "$$OUTPUT"; \ + exit 1; \ + fi + +vet: + $(GO_VET) + +# https://github.com/golang/lint +# go get github.com/golang/lint/golint +# Capture output and force failure when there is non-empty output +# Only run on go1.5+ +lint: + @echo golint ./... 
+ @OUTPUT=`command -v golint >/dev/null 2>&1 && golint ./... 2>&1`; \ + if [ "$$OUTPUT" ]; then \ + echo "golint errors:"; \ + echo "$$OUTPUT"; \ + exit 1; \ + fi diff --git a/vendor/github.com/mattermost/ldap/README.md b/vendor/github.com/mattermost/ldap/README.md new file mode 100644 index 00000000..87287065 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/README.md @@ -0,0 +1,61 @@ +[![Go Reference](https://pkg.go.dev/badge/github.com/mattermost/ldap.svg)](https://pkg.go.dev/github.com/mattermost/ldap) +[![Build Status](https://travis-ci.org/go-ldap/ldap.svg)](https://travis-ci.org/go-ldap/ldap) + +# Basic LDAP v3 functionality for the GO programming language. + +## Features: + + - Connecting to LDAP server (non-TLS, TLS, STARTTLS) + - Binding to LDAP server + - Searching for entries + - Filter Compile / Decompile + - Paging Search Results + - Modify Requests / Responses + - Add Requests / Responses + - Delete Requests / Responses + - Modify DN Requests / Responses + +## Examples: + + - search + - modify + +## Go Modules: + +`go get github.com/go-ldap/ldap/v3` + +As go-ldap was v2+ when Go Modules came out, updating to Go Modules would be considered a breaking change. + +To maintain backwards compatability, we ultimately decided to use subfolders (as v3 was already a branch). +Whilst this duplicates the code, we can move toward implementing a backwards-compatible versioning system that allows for code reuse. +The alternative would be to increment the version number, however we believe that this would confuse users as v3 is in line with LDAPv3 (RFC-4511) +https://tools.ietf.org/html/rfc4511 + + +For more info, please visit the pull request that updated to modules. +https://github.com/go-ldap/ldap/pull/247 + +To install with `GOMODULE111=off`, use `go get github.com/go-ldap/ldap` +https://golang.org/cmd/go/#hdr-Legacy_GOPATH_go_get + +As always, we are looking for contributors with great ideas on how to best move forward. 
+ + +## Contributing: + +Bug reports and pull requests are welcome! + +Before submitting a pull request, please make sure tests and verification scripts pass: +``` +make all +``` + +To set up a pre-push hook to run the tests and verify scripts before pushing: +``` +ln -s ../../.githooks/pre-push .git/hooks/pre-push +``` + +--- +The Go gopher was designed by Renee French. (http://reneefrench.blogspot.com/) +The design is licensed under the Creative Commons 3.0 Attributions license. +Read this article for more details: http://blog.golang.org/gopher diff --git a/vendor/github.com/mattermost/ldap/add.go b/vendor/github.com/mattermost/ldap/add.go new file mode 100644 index 00000000..334e3234 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/add.go @@ -0,0 +1,100 @@ +// +// https://tools.ietf.org/html/rfc4511 +// +// AddRequest ::= [APPLICATION 8] SEQUENCE { +// entry LDAPDN, +// attributes AttributeList } +// +// AttributeList ::= SEQUENCE OF attribute Attribute + +package ldap + +import ( + ber "github.com/go-asn1-ber/asn1-ber" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// Attribute represents an LDAP attribute +type Attribute struct { + // Type is the name of the LDAP attribute + Type string + // Vals are the LDAP attribute values + Vals []string +} + +func (a *Attribute) encode() *ber.Packet { + seq := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Attribute") + seq.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, a.Type, "Type")) + set := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSet, nil, "AttributeValue") + for _, value := range a.Vals { + set.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, value, "Vals")) + } + seq.AppendChild(set) + return seq +} + +// AddRequest represents an LDAP AddRequest operation +type AddRequest struct { + // DN identifies the entry being added + DN string + // Attributes list the attributes 
of the new entry + Attributes []Attribute + // Controls hold optional controls to send with the request + Controls []Control +} + +func (req *AddRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationAddRequest, nil, "Add Request") + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.DN, "DN")) + attributes := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Attributes") + for _, attribute := range req.Attributes { + attributes.AppendChild(attribute.encode()) + } + pkt.AppendChild(attributes) + + envelope.AppendChild(pkt) + if len(req.Controls) > 0 { + envelope.AppendChild(encodeControls(req.Controls)) + } + + return nil +} + +// Attribute adds an attribute with the given type and values +func (req *AddRequest) Attribute(attrType string, attrVals []string) { + req.Attributes = append(req.Attributes, Attribute{Type: attrType, Vals: attrVals}) +} + +// NewAddRequest returns an AddRequest for the given DN, with no attributes +func NewAddRequest(dn string, controls []Control) *AddRequest { + return &AddRequest{ + DN: dn, + Controls: controls, + } + +} + +// Add performs the given AddRequest +func (l *Conn) Add(addRequest *AddRequest) error { + msgCtx, err := l.doRequest(addRequest) + if err != nil { + return err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return err + } + + tag := packet.Children[1].Tag + if tag == ApplicationAddResponse { + err := GetLDAPError(packet) + if err != nil { + return err + } + } else { + l.Debug.Log("Unexpected Response", mlog.Uint("tag", tag)) + } + return nil +} diff --git a/vendor/github.com/mattermost/ldap/bind.go b/vendor/github.com/mattermost/ldap/bind.go new file mode 100644 index 00000000..15307f50 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/bind.go @@ -0,0 +1,152 @@ +package ldap + +import ( + "errors" + "fmt" + + ber 
"github.com/go-asn1-ber/asn1-ber" +) + +// SimpleBindRequest represents a username/password bind operation +type SimpleBindRequest struct { + // Username is the name of the Directory object that the client wishes to bind as + Username string + // Password is the credentials to bind with + Password string + // Controls are optional controls to send with the bind request + Controls []Control + // AllowEmptyPassword sets whether the client allows binding with an empty password + // (normally used for unauthenticated bind). + AllowEmptyPassword bool +} + +// SimpleBindResult contains the response from the server +type SimpleBindResult struct { + Controls []Control +} + +// NewSimpleBindRequest returns a bind request +func NewSimpleBindRequest(username string, password string, controls []Control) *SimpleBindRequest { + return &SimpleBindRequest{ + Username: username, + Password: password, + Controls: controls, + AllowEmptyPassword: false, + } +} + +func (req *SimpleBindRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationBindRequest, nil, "Bind Request") + pkt.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, 3, "Version")) + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.Username, "User Name")) + pkt.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 0, req.Password, "Password")) + + envelope.AppendChild(pkt) + if len(req.Controls) > 0 { + envelope.AppendChild(encodeControls(req.Controls)) + } + + return nil +} + +// SimpleBind performs the simple bind operation defined in the given request +func (l *Conn) SimpleBind(simpleBindRequest *SimpleBindRequest) (*SimpleBindResult, error) { + if simpleBindRequest.Password == "" && !simpleBindRequest.AllowEmptyPassword { + return nil, NewError(ErrorEmptyPassword, errors.New("ldap: empty password not allowed by the client")) + } + + msgCtx, err := 
l.doRequest(simpleBindRequest) + if err != nil { + return nil, err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return nil, err + } + + result := &SimpleBindResult{ + Controls: make([]Control, 0), + } + + if len(packet.Children) == 3 { + for _, child := range packet.Children[2].Children { + decodedChild, decodeErr := DecodeControl(child) + if decodeErr != nil { + return nil, fmt.Errorf("failed to decode child control: %s", decodeErr) + } + result.Controls = append(result.Controls, decodedChild) + } + } + + err = GetLDAPError(packet) + return result, err +} + +// Bind performs a bind with the given username and password. +// +// It does not allow unauthenticated bind (i.e. empty password). Use the UnauthenticatedBind method +// for that. +func (l *Conn) Bind(username, password string) error { + req := &SimpleBindRequest{ + Username: username, + Password: password, + AllowEmptyPassword: false, + } + _, err := l.SimpleBind(req) + return err +} + +// UnauthenticatedBind performs an unauthenticated bind. +// +// A username may be provided for trace (e.g. logging) purpose only, but it is normally not +// authenticated or otherwise validated by the LDAP server. +// +// See https://tools.ietf.org/html/rfc4513#section-5.1.2 . +// See https://tools.ietf.org/html/rfc4513#section-6.3.1 . 
+func (l *Conn) UnauthenticatedBind(username string) error { + req := &SimpleBindRequest{ + Username: username, + Password: "", + AllowEmptyPassword: true, + } + _, err := l.SimpleBind(req) + return err +} + +var externalBindRequest = requestFunc(func(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationBindRequest, nil, "Bind Request") + pkt.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, 3, "Version")) + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, "", "User Name")) + + saslAuth := ber.Encode(ber.ClassContext, ber.TypeConstructed, 3, "", "authentication") + saslAuth.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, "EXTERNAL", "SASL Mech")) + saslAuth.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, "", "SASL Cred")) + + pkt.AppendChild(saslAuth) + + envelope.AppendChild(pkt) + + return nil +}) + +// ExternalBind performs SASL/EXTERNAL authentication. +// +// Use ldap.DialURL("ldapi://") to connect to the Unix socket before ExternalBind. 
+// +// See https://tools.ietf.org/html/rfc4422#appendix-A +func (l *Conn) ExternalBind() error { + msgCtx, err := l.doRequest(externalBindRequest) + if err != nil { + return err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return err + } + + return GetLDAPError(packet) +} diff --git a/vendor/github.com/mattermost/ldap/client.go b/vendor/github.com/mattermost/ldap/client.go new file mode 100644 index 00000000..619677c7 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/client.go @@ -0,0 +1,30 @@ +package ldap + +import ( + "crypto/tls" + "time" +) + +// Client knows how to interact with an LDAP server +type Client interface { + Start() + StartTLS(*tls.Config) error + Close() + SetTimeout(time.Duration) + + Bind(username, password string) error + UnauthenticatedBind(username string) error + SimpleBind(*SimpleBindRequest) (*SimpleBindResult, error) + ExternalBind() error + + Add(*AddRequest) error + Del(*DelRequest) error + Modify(*ModifyRequest) error + ModifyDN(*ModifyDNRequest) error + + Compare(dn, attribute, value string) (bool, error) + PasswordModify(*PasswordModifyRequest) (*PasswordModifyResult, error) + + Search(*SearchRequest) (*SearchResult, error) + SearchWithPaging(searchRequest *SearchRequest, pagingSize uint32) (*SearchResult, error) +} diff --git a/vendor/github.com/mattermost/ldap/compare.go b/vendor/github.com/mattermost/ldap/compare.go new file mode 100644 index 00000000..04a2e17c --- /dev/null +++ b/vendor/github.com/mattermost/ldap/compare.go @@ -0,0 +1,80 @@ +// File contains Compare functionality +// +// https://tools.ietf.org/html/rfc4511 +// +// CompareRequest ::= [APPLICATION 14] SEQUENCE { +// entry LDAPDN, +// ava AttributeValueAssertion } +// +// AttributeValueAssertion ::= SEQUENCE { +// attributeDesc AttributeDescription, +// assertionValue AssertionValue } +// +// AttributeDescription ::= LDAPString +// -- Constrained to +// -- [RFC4512] +// +// AttributeValue ::= OCTET STRING +// 
+ +package ldap + +import ( + "fmt" + + ber "github.com/go-asn1-ber/asn1-ber" +) + +// CompareRequest represents an LDAP CompareRequest operation. +type CompareRequest struct { + DN string + Attribute string + Value string +} + +func (req *CompareRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationCompareRequest, nil, "Compare Request") + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.DN, "DN")) + + ava := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "AttributeValueAssertion") + ava.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.Attribute, "AttributeDesc")) + ava.AppendChild(ber.Encode(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.Value, "AssertionValue")) + + pkt.AppendChild(ava) + + envelope.AppendChild(pkt) + + return nil +} + +// Compare checks to see if the attribute of the dn matches value. Returns true if it does otherwise +// false with any error that occurs if any. 
+func (l *Conn) Compare(dn, attribute, value string) (bool, error) { + msgCtx, err := l.doRequest(&CompareRequest{ + DN: dn, + Attribute: attribute, + Value: value}) + if err != nil { + return false, err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return false, err + } + + if packet.Children[1].Tag == ApplicationCompareResponse { + err := GetLDAPError(packet) + + switch { + case IsErrorWithCode(err, LDAPResultCompareTrue): + return true, nil + case IsErrorWithCode(err, LDAPResultCompareFalse): + return false, nil + default: + return false, err + } + } + return false, fmt.Errorf("unexpected Response: %d", packet.Children[1].Tag) +} diff --git a/vendor/github.com/mattermost/ldap/conn.go b/vendor/github.com/mattermost/ldap/conn.go new file mode 100644 index 00000000..8644c1ee --- /dev/null +++ b/vendor/github.com/mattermost/ldap/conn.go @@ -0,0 +1,522 @@ +package ldap + +import ( + "crypto/tls" + "errors" + "fmt" + "net" + "net/url" + "sync" + "sync/atomic" + "time" + + ber "github.com/go-asn1-ber/asn1-ber" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +const ( + // MessageQuit causes the processMessages loop to exit + MessageQuit = 0 + // MessageRequest sends a request to the server + MessageRequest = 1 + // MessageResponse receives a response from the server + MessageResponse = 2 + // MessageFinish indicates the client considers a particular message ID to be finished + MessageFinish = 3 + // MessageTimeout indicates the client-specified timeout for a particular message ID has been reached + MessageTimeout = 4 +) + +const ( + // DefaultLdapPort default ldap port for pure TCP connection + DefaultLdapPort = "389" + // DefaultLdapsPort default ldap port for SSL connection + DefaultLdapsPort = "636" +) + +// PacketResponse contains the packet or error encountered reading a response +type PacketResponse struct { + // Packet is the packet read from the server + Packet *ber.Packet + // Error is an 
error encountered while reading + Error error +} + +// ReadPacket returns the packet or an error +func (pr *PacketResponse) ReadPacket() (*ber.Packet, error) { + if (pr == nil) || (pr.Packet == nil && pr.Error == nil) { + return nil, NewError(ErrorNetwork, errors.New("ldap: could not retrieve response")) + } + return pr.Packet, pr.Error +} + +type messageContext struct { + id int64 + // close(done) should only be called from finishMessage() + done chan struct{} + // close(responses) should only be called from processMessages(), and only sent to from sendResponse() + responses chan *PacketResponse +} + +// sendResponse should only be called within the processMessages() loop which +// is also responsible for closing the responses channel. +func (msgCtx *messageContext) sendResponse(packet *PacketResponse) { + select { + case msgCtx.responses <- packet: + // Successfully sent packet to message handler. + case <-msgCtx.done: + // The request handler is done and will not receive more + // packets. + } +} + +type messagePacket struct { + Op int + MessageID int64 + Packet *ber.Packet + Context *messageContext +} + +type sendMessageFlags uint + +const ( + startTLS sendMessageFlags = 1 << iota +) + +// Conn represents an LDAP Connection +type Conn struct { + // requestTimeout is loaded atomically + // so we need to ensure 64-bit alignment on 32-bit platforms. + requestTimeout int64 + conn net.Conn + isTLS bool + closing uint32 + closeErr atomic.Value + isStartingTLS bool + Debug debugging + chanConfirm chan struct{} + messageContexts map[int64]*messageContext + chanMessage chan *messagePacket + chanMessageID chan int64 + wgClose sync.WaitGroup + outstandingRequests uint + messageMutex sync.Mutex +} + +var _ Client = &Conn{} + +// DefaultTimeout is a package-level variable that sets the timeout value +// used for the Dial and DialTLS methods. 
+// +// WARNING: since this is a package-level variable, setting this value from +// multiple places will probably result in undesired behaviour. +var DefaultTimeout = 60 * time.Second + +// Dial connects to the given address on the given network using net.Dial +// and then returns a new Conn for the connection. +func Dial(network, addr string) (*Conn, error) { + c, err := net.DialTimeout(network, addr, DefaultTimeout) + if err != nil { + return nil, NewError(ErrorNetwork, err) + } + conn := NewConn(c, false) + return conn, nil +} + +// DialTLS connects to the given address on the given network using tls.Dial +// and then returns a new Conn for the connection. +func DialTLS(network, addr string, config *tls.Config) (*Conn, error) { + c, err := tls.DialWithDialer(&net.Dialer{Timeout: DefaultTimeout}, network, addr, config) + if err != nil { + return nil, NewError(ErrorNetwork, err) + } + conn := NewConn(c, true) + return conn, nil +} + +// DialURL connects to the given ldap URL vie TCP using tls.Dial or net.Dial if ldaps:// +// or ldap:// specified as protocol. On success a new Conn for the connection +// is returned. 
+func DialURL(addr string) (*Conn, error) { + lurl, err := url.Parse(addr) + if err != nil { + return nil, NewError(ErrorNetwork, err) + } + + host, port, err := net.SplitHostPort(lurl.Host) + if err != nil { + // we asume that error is due to missing port + host = lurl.Host + port = "" + } + + switch lurl.Scheme { + case "ldapi": + if lurl.Path == "" || lurl.Path == "/" { + lurl.Path = "/var/run/slapd/ldapi" + } + return Dial("unix", lurl.Path) + case "ldap": + if port == "" { + port = DefaultLdapPort + } + return Dial("tcp", net.JoinHostPort(host, port)) + case "ldaps": + if port == "" { + port = DefaultLdapsPort + } + tlsConf := &tls.Config{ + ServerName: host, + } + return DialTLS("tcp", net.JoinHostPort(host, port), tlsConf) + } + + return nil, NewError(ErrorNetwork, fmt.Errorf("Unknown scheme '%s'", lurl.Scheme)) +} + +// NewConn returns a new Conn using conn for network I/O. +func NewConn(conn net.Conn, isTLS bool) *Conn { + return &Conn{ + conn: conn, + chanConfirm: make(chan struct{}), + chanMessageID: make(chan int64), + chanMessage: make(chan *messagePacket, 10), + messageContexts: map[int64]*messageContext{}, + requestTimeout: 0, + isTLS: isTLS, + } +} + +// Start initializes goroutines to read responses and process messages +func (l *Conn) Start() { + l.wgClose.Add(1) + go l.reader() + go l.processMessages() +} + +// IsClosing returns whether or not we're currently closing. +func (l *Conn) IsClosing() bool { + return atomic.LoadUint32(&l.closing) == 1 +} + +// setClosing sets the closing value to true +func (l *Conn) setClosing() bool { + return atomic.CompareAndSwapUint32(&l.closing, 0, 1) +} + +// Close closes the connection. 
+func (l *Conn) Close() { + l.messageMutex.Lock() + defer l.messageMutex.Unlock() + + if l.setClosing() { + l.Debug.Log("Sending quit message and waiting for confirmation") + l.chanMessage <- &messagePacket{Op: MessageQuit} + <-l.chanConfirm + close(l.chanMessage) + + l.Debug.Log("Closing network connection") + if err := l.conn.Close(); err != nil { + l.Debug.Log("Error closing network connection", mlog.Err(err)) + } + + l.wgClose.Done() + } + l.wgClose.Wait() +} + +// SetTimeout sets the time after a request is sent that a MessageTimeout triggers +func (l *Conn) SetTimeout(timeout time.Duration) { + if timeout > 0 { + atomic.StoreInt64(&l.requestTimeout, int64(timeout)) + } +} + +// Returns the next available messageID +func (l *Conn) nextMessageID() int64 { + if messageID, ok := <-l.chanMessageID; ok { + return messageID + } + return 0 +} + +// StartTLS sends the command to start a TLS session and then creates a new TLS Client +func (l *Conn) StartTLS(config *tls.Config) error { + if l.isTLS { + return NewError(ErrorNetwork, errors.New("ldap: already encrypted")) + } + + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "LDAP Request") + packet.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, l.nextMessageID(), "MessageID")) + request := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationExtendedRequest, nil, "Start TLS") + request.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 0, "1.3.6.1.4.1.1466.20037", "TLS Extended Command")) + packet.AppendChild(request) + + l.Debug.Log("Sending StartTLS packet", PacketToField(packet)) + + msgCtx, err := l.sendMessageWithFlags(packet, startTLS) + if err != nil { + return err + } + defer l.finishMessage(msgCtx) + + l.Debug.Log("Waiting for StartTLS response", mlog.Int("id", msgCtx.id)) + + packetResponse, ok := <-msgCtx.responses + if !ok { + return NewError(ErrorNetwork, errors.New("ldap: response channel closed")) + } + packet, 
err = packetResponse.ReadPacket() + if l.Debug.Enabled() { + if err := addLDAPDescriptions(packet); err != nil { + l.Close() + return err + } + + l.Debug.Log("Got response %p", mlog.Err(err), mlog.Int("id", msgCtx.id), PacketToField(packet), mlog.Err(err)) + } + + if err != nil { + return err + } + + if err := GetLDAPError(packet); err == nil { + conn := tls.Client(l.conn, config) + + if connErr := conn.Handshake(); connErr != nil { + l.Close() + return NewError(ErrorNetwork, fmt.Errorf("TLS handshake failed (%v)", connErr)) + } + + l.isTLS = true + l.conn = conn + } else { + return err + } + go l.reader() + + return nil +} + +// TLSConnectionState returns the client's TLS connection state. +// The return values are their zero values if StartTLS did +// not succeed. +func (l *Conn) TLSConnectionState() (state tls.ConnectionState, ok bool) { + tc, ok := l.conn.(*tls.Conn) + if !ok { + return + } + return tc.ConnectionState(), true +} + +func (l *Conn) sendMessage(packet *ber.Packet) (*messageContext, error) { + return l.sendMessageWithFlags(packet, 0) +} + +func (l *Conn) sendMessageWithFlags(packet *ber.Packet, flags sendMessageFlags) (*messageContext, error) { + if l.IsClosing() { + return nil, NewError(ErrorNetwork, errors.New("ldap: connection closed")) + } + l.messageMutex.Lock() + + if l.isStartingTLS { + l.messageMutex.Unlock() + return nil, NewError(ErrorNetwork, errors.New("ldap: connection is in startls phase")) + } + if flags&startTLS != 0 { + if l.outstandingRequests != 0 { + l.messageMutex.Unlock() + return nil, NewError(ErrorNetwork, errors.New("ldap: cannot StartTLS with outstanding requests")) + } + l.isStartingTLS = true + } + l.outstandingRequests++ + + l.messageMutex.Unlock() + + responses := make(chan *PacketResponse) + messageID := packet.Children[0].Value.(int64) + message := &messagePacket{ + Op: MessageRequest, + MessageID: messageID, + Packet: packet, + Context: &messageContext{ + id: messageID, + done: make(chan struct{}), + responses: 
responses, + }, + } + l.sendProcessMessage(message) + return message.Context, nil +} + +func (l *Conn) finishMessage(msgCtx *messageContext) { + close(msgCtx.done) + + if l.IsClosing() { + return + } + + l.messageMutex.Lock() + l.outstandingRequests-- + if l.isStartingTLS { + l.isStartingTLS = false + } + l.messageMutex.Unlock() + + message := &messagePacket{ + Op: MessageFinish, + MessageID: msgCtx.id, + } + l.sendProcessMessage(message) +} + +func (l *Conn) sendProcessMessage(message *messagePacket) bool { + l.messageMutex.Lock() + defer l.messageMutex.Unlock() + if l.IsClosing() { + return false + } + l.chanMessage <- message + return true +} + +func (l *Conn) processMessages() { + defer func() { + if r := recover(); r != nil { + l.Debug.Log("Recovered panic in processMessages", mlog.Any("panic", r)) + } + for messageID, msgCtx := range l.messageContexts { + // If we are closing due to an error, inform anyone who + // is waiting about the error. + if l.IsClosing() && l.closeErr.Load() != nil { + msgCtx.sendResponse(&PacketResponse{Error: l.closeErr.Load().(error)}) + } + l.Debug.Log("Closing channel for MessageID", mlog.Int("message_id", messageID)) + close(msgCtx.responses) + delete(l.messageContexts, messageID) + } + close(l.chanMessageID) + close(l.chanConfirm) + }() + + var messageID int64 = 1 + for { + select { + case l.chanMessageID <- messageID: + messageID++ + case message := <-l.chanMessage: + switch message.Op { + case MessageQuit: + l.Debug.Log("Quit message received: Shutting down") + return + case MessageRequest: + // Add to message list and write to network + buf := message.Packet.Bytes() + _, err := l.conn.Write(buf) + if err != nil { + l.Debug.Log("Error Sending Message", mlog.Err(err)) + message.Context.sendResponse(&PacketResponse{Error: fmt.Errorf("unable to send request: %s", err)}) + close(message.Context.responses) + break + } + + // Only add to messageContexts if we were able to + // successfully write the message. 
+ l.messageContexts[message.MessageID] = message.Context + + // Add timeout if defined + requestTimeout := time.Duration(atomic.LoadInt64(&l.requestTimeout)) + if requestTimeout > 0 { + go func() { + defer func() { + if r := recover(); r != nil { + l.Debug.Log("Recovered panic in RequestTimeout", mlog.Any("panic", r)) + } + }() + time.Sleep(requestTimeout) + timeoutMessage := &messagePacket{ + Op: MessageTimeout, + MessageID: message.MessageID, + } + l.sendProcessMessage(timeoutMessage) + }() + } + case MessageResponse: + if msgCtx, ok := l.messageContexts[message.MessageID]; ok { + msgCtx.sendResponse(&PacketResponse{message.Packet, nil}) + } else { + l.Debug.Log( + "Received unexpected message", + mlog.Int("message_id", message.MessageID), + mlog.Bool("is_closing", l.IsClosing()), + PacketToField(message.Packet), + ) + } + case MessageTimeout: + // Handle the timeout by closing the channel + // All reads will return immediately + if msgCtx, ok := l.messageContexts[message.MessageID]; ok { + l.Debug.Log("Receiving message timeout", mlog.Int("message_id", message.MessageID)) + msgCtx.sendResponse(&PacketResponse{message.Packet, errors.New("ldap: connection timed out")}) + delete(l.messageContexts, message.MessageID) + close(msgCtx.responses) + } + case MessageFinish: + l.Debug.Log("Finished message", mlog.Int("message_id", message.MessageID)) + if msgCtx, ok := l.messageContexts[message.MessageID]; ok { + delete(l.messageContexts, message.MessageID) + close(msgCtx.responses) + } + } + } + } +} + +func (l *Conn) reader() { + cleanstop := false + defer func() { + if r := recover(); r != nil { + l.Debug.Log("Recovered panic in reader", mlog.Any("panic", r)) + } + if !cleanstop { + l.Close() + } + }() + + for { + if cleanstop { + l.Debug.Log("Reader clean stopping (without closing the connection)") + return + } + packet, err := ber.ReadPacket(l.conn) + if err != nil { + // A read error is expected here if we are closing the connection... 
+ if !l.IsClosing() { + l.closeErr.Store(fmt.Errorf("unable to read LDAP response packet: %s", err)) + l.Debug.Log("Reader error", mlog.Err(err)) + } + return + } + if err := addLDAPDescriptions(packet); err != nil { + l.Debug.Log("Descriptions error", mlog.Err(err)) + } + if len(packet.Children) == 0 { + l.Debug.Log("Received bad ldap packet") + continue + } + l.messageMutex.Lock() + if l.isStartingTLS { + cleanstop = true + } + l.messageMutex.Unlock() + message := &messagePacket{ + Op: MessageResponse, + MessageID: packet.Children[0].Value.(int64), + Packet: packet, + } + if !l.sendProcessMessage(message) { + return + } + } +} diff --git a/vendor/github.com/mattermost/ldap/control.go b/vendor/github.com/mattermost/ldap/control.go new file mode 100644 index 00000000..463fe3a3 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/control.go @@ -0,0 +1,499 @@ +package ldap + +import ( + "fmt" + "strconv" + + "github.com/go-asn1-ber/asn1-ber" +) + +const ( + // ControlTypePaging - https://www.ietf.org/rfc/rfc2696.txt + ControlTypePaging = "1.2.840.113556.1.4.319" + // ControlTypeBeheraPasswordPolicy - https://tools.ietf.org/html/draft-behera-ldap-password-policy-10 + ControlTypeBeheraPasswordPolicy = "1.3.6.1.4.1.42.2.27.8.5.1" + // ControlTypeVChuPasswordMustChange - https://tools.ietf.org/html/draft-vchu-ldap-pwd-policy-00 + ControlTypeVChuPasswordMustChange = "2.16.840.1.113730.3.4.4" + // ControlTypeVChuPasswordWarning - https://tools.ietf.org/html/draft-vchu-ldap-pwd-policy-00 + ControlTypeVChuPasswordWarning = "2.16.840.1.113730.3.4.5" + // ControlTypeManageDsaIT - https://tools.ietf.org/html/rfc3296 + ControlTypeManageDsaIT = "2.16.840.1.113730.3.4.2" + + // ControlTypeMicrosoftNotification - https://msdn.microsoft.com/en-us/library/aa366983(v=vs.85).aspx + ControlTypeMicrosoftNotification = "1.2.840.113556.1.4.528" + // ControlTypeMicrosoftShowDeleted - https://msdn.microsoft.com/en-us/library/aa366989(v=vs.85).aspx + ControlTypeMicrosoftShowDeleted = 
"1.2.840.113556.1.4.417" +) + +// ControlTypeMap maps controls to text descriptions +var ControlTypeMap = map[string]string{ + ControlTypePaging: "Paging", + ControlTypeBeheraPasswordPolicy: "Password Policy - Behera Draft", + ControlTypeManageDsaIT: "Manage DSA IT", + ControlTypeMicrosoftNotification: "Change Notification - Microsoft", + ControlTypeMicrosoftShowDeleted: "Show Deleted Objects - Microsoft", +} + +// Control defines an interface controls provide to encode and describe themselves +type Control interface { + // GetControlType returns the OID + GetControlType() string + // Encode returns the ber packet representation + Encode() *ber.Packet + // String returns a human-readable description + String() string +} + +// ControlString implements the Control interface for simple controls +type ControlString struct { + ControlType string + Criticality bool + ControlValue string +} + +// GetControlType returns the OID +func (c *ControlString) GetControlType() string { + return c.ControlType +} + +// Encode returns the ber packet representation +func (c *ControlString) Encode() *ber.Packet { + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Control") + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, c.ControlType, "Control Type ("+ControlTypeMap[c.ControlType]+")")) + if c.Criticality { + packet.AppendChild(ber.NewBoolean(ber.ClassUniversal, ber.TypePrimitive, ber.TagBoolean, c.Criticality, "Criticality")) + } + if c.ControlValue != "" { + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, string(c.ControlValue), "Control Value")) + } + return packet +} + +// String returns a human-readable description +func (c *ControlString) String() string { + return fmt.Sprintf("Control Type: %s (%q) Criticality: %t Control Value: %s", ControlTypeMap[c.ControlType], c.ControlType, c.Criticality, c.ControlValue) +} + +// ControlPaging implements the paging 
control described in https://www.ietf.org/rfc/rfc2696.txt +type ControlPaging struct { + // PagingSize indicates the page size + PagingSize uint32 + // Cookie is an opaque value returned by the server to track a paging cursor + Cookie []byte +} + +// GetControlType returns the OID +func (c *ControlPaging) GetControlType() string { + return ControlTypePaging +} + +// Encode returns the ber packet representation +func (c *ControlPaging) Encode() *ber.Packet { + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Control") + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, ControlTypePaging, "Control Type ("+ControlTypeMap[ControlTypePaging]+")")) + + p2 := ber.Encode(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, nil, "Control Value (Paging)") + seq := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Search Control Value") + seq.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, int64(c.PagingSize), "Paging Size")) + cookie := ber.Encode(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, nil, "Cookie") + cookie.Value = c.Cookie + cookie.Data.Write(c.Cookie) + seq.AppendChild(cookie) + p2.AppendChild(seq) + + packet.AppendChild(p2) + return packet +} + +// String returns a human-readable description +func (c *ControlPaging) String() string { + return fmt.Sprintf( + "Control Type: %s (%q) Criticality: %t PagingSize: %d Cookie: %q", + ControlTypeMap[ControlTypePaging], + ControlTypePaging, + false, + c.PagingSize, + c.Cookie) +} + +// SetCookie stores the given cookie in the paging control +func (c *ControlPaging) SetCookie(cookie []byte) { + c.Cookie = cookie +} + +// ControlBeheraPasswordPolicy implements the control described in https://tools.ietf.org/html/draft-behera-ldap-password-policy-10 +type ControlBeheraPasswordPolicy struct { + // Expire contains the number of seconds before a password will expire + Expire 
int64 + // Grace indicates the remaining number of times a user will be allowed to authenticate with an expired password + Grace int64 + // Error indicates the error code + Error int8 + // ErrorString is a human readable error + ErrorString string +} + +// GetControlType returns the OID +func (c *ControlBeheraPasswordPolicy) GetControlType() string { + return ControlTypeBeheraPasswordPolicy +} + +// Encode returns the ber packet representation +func (c *ControlBeheraPasswordPolicy) Encode() *ber.Packet { + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Control") + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, ControlTypeBeheraPasswordPolicy, "Control Type ("+ControlTypeMap[ControlTypeBeheraPasswordPolicy]+")")) + + return packet +} + +// String returns a human-readable description +func (c *ControlBeheraPasswordPolicy) String() string { + return fmt.Sprintf( + "Control Type: %s (%q) Criticality: %t Expire: %d Grace: %d Error: %d, ErrorString: %s", + ControlTypeMap[ControlTypeBeheraPasswordPolicy], + ControlTypeBeheraPasswordPolicy, + false, + c.Expire, + c.Grace, + c.Error, + c.ErrorString) +} + +// ControlVChuPasswordMustChange implements the control described in https://tools.ietf.org/html/draft-vchu-ldap-pwd-policy-00 +type ControlVChuPasswordMustChange struct { + // MustChange indicates if the password is required to be changed + MustChange bool +} + +// GetControlType returns the OID +func (c *ControlVChuPasswordMustChange) GetControlType() string { + return ControlTypeVChuPasswordMustChange +} + +// Encode returns the ber packet representation +func (c *ControlVChuPasswordMustChange) Encode() *ber.Packet { + return nil +} + +// String returns a human-readable description +func (c *ControlVChuPasswordMustChange) String() string { + return fmt.Sprintf( + "Control Type: %s (%q) Criticality: %t MustChange: %v", + ControlTypeMap[ControlTypeVChuPasswordMustChange], + 
ControlTypeVChuPasswordMustChange, + false, + c.MustChange) +} + +// ControlVChuPasswordWarning implements the control described in https://tools.ietf.org/html/draft-vchu-ldap-pwd-policy-00 +type ControlVChuPasswordWarning struct { + // Expire indicates the time in seconds until the password expires + Expire int64 +} + +// GetControlType returns the OID +func (c *ControlVChuPasswordWarning) GetControlType() string { + return ControlTypeVChuPasswordWarning +} + +// Encode returns the ber packet representation +func (c *ControlVChuPasswordWarning) Encode() *ber.Packet { + return nil +} + +// String returns a human-readable description +func (c *ControlVChuPasswordWarning) String() string { + return fmt.Sprintf( + "Control Type: %s (%q) Criticality: %t Expire: %b", + ControlTypeMap[ControlTypeVChuPasswordWarning], + ControlTypeVChuPasswordWarning, + false, + c.Expire) +} + +// ControlManageDsaIT implements the control described in https://tools.ietf.org/html/rfc3296 +type ControlManageDsaIT struct { + // Criticality indicates if this control is required + Criticality bool +} + +// GetControlType returns the OID +func (c *ControlManageDsaIT) GetControlType() string { + return ControlTypeManageDsaIT +} + +// Encode returns the ber packet representation +func (c *ControlManageDsaIT) Encode() *ber.Packet { + //FIXME + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Control") + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, ControlTypeManageDsaIT, "Control Type ("+ControlTypeMap[ControlTypeManageDsaIT]+")")) + if c.Criticality { + packet.AppendChild(ber.NewBoolean(ber.ClassUniversal, ber.TypePrimitive, ber.TagBoolean, c.Criticality, "Criticality")) + } + return packet +} + +// String returns a human-readable description +func (c *ControlManageDsaIT) String() string { + return fmt.Sprintf( + "Control Type: %s (%q) Criticality: %t", + ControlTypeMap[ControlTypeManageDsaIT], + 
ControlTypeManageDsaIT, + c.Criticality) +} + +// NewControlManageDsaIT returns a ControlManageDsaIT control +func NewControlManageDsaIT(Criticality bool) *ControlManageDsaIT { + return &ControlManageDsaIT{Criticality: Criticality} +} + +// ControlMicrosoftNotification implements the control described in https://msdn.microsoft.com/en-us/library/aa366983(v=vs.85).aspx +type ControlMicrosoftNotification struct{} + +// GetControlType returns the OID +func (c *ControlMicrosoftNotification) GetControlType() string { + return ControlTypeMicrosoftNotification +} + +// Encode returns the ber packet representation +func (c *ControlMicrosoftNotification) Encode() *ber.Packet { + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Control") + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, ControlTypeMicrosoftNotification, "Control Type ("+ControlTypeMap[ControlTypeMicrosoftNotification]+")")) + + return packet +} + +// String returns a human-readable description +func (c *ControlMicrosoftNotification) String() string { + return fmt.Sprintf( + "Control Type: %s (%q)", + ControlTypeMap[ControlTypeMicrosoftNotification], + ControlTypeMicrosoftNotification) +} + +// NewControlMicrosoftNotification returns a ControlMicrosoftNotification control +func NewControlMicrosoftNotification() *ControlMicrosoftNotification { + return &ControlMicrosoftNotification{} +} + +// ControlMicrosoftShowDeleted implements the control described in https://msdn.microsoft.com/en-us/library/aa366989(v=vs.85).aspx +type ControlMicrosoftShowDeleted struct{} + +// GetControlType returns the OID +func (c *ControlMicrosoftShowDeleted) GetControlType() string { + return ControlTypeMicrosoftShowDeleted +} + +// Encode returns the ber packet representation +func (c *ControlMicrosoftShowDeleted) Encode() *ber.Packet { + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Control") + 
packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, ControlTypeMicrosoftShowDeleted, "Control Type ("+ControlTypeMap[ControlTypeMicrosoftShowDeleted]+")")) + + return packet +} + +// String returns a human-readable description +func (c *ControlMicrosoftShowDeleted) String() string { + return fmt.Sprintf( + "Control Type: %s (%q)", + ControlTypeMap[ControlTypeMicrosoftShowDeleted], + ControlTypeMicrosoftShowDeleted) +} + +// NewControlMicrosoftShowDeleted returns a ControlMicrosoftShowDeleted control +func NewControlMicrosoftShowDeleted() *ControlMicrosoftShowDeleted { + return &ControlMicrosoftShowDeleted{} +} + +// FindControl returns the first control of the given type in the list, or nil +func FindControl(controls []Control, controlType string) Control { + for _, c := range controls { + if c.GetControlType() == controlType { + return c + } + } + return nil +} + +// DecodeControl returns a control read from the given packet, or nil if no recognized control can be made +func DecodeControl(packet *ber.Packet) (Control, error) { + var ( + ControlType = "" + Criticality = false + value *ber.Packet + ) + + switch len(packet.Children) { + case 0: + // at least one child is required for control type + return nil, fmt.Errorf("at least one child is required for control type") + + case 1: + // just type, no criticality or value + packet.Children[0].Description = "Control Type (" + ControlTypeMap[ControlType] + ")" + ControlType = packet.Children[0].Value.(string) + + case 2: + packet.Children[0].Description = "Control Type (" + ControlTypeMap[ControlType] + ")" + ControlType = packet.Children[0].Value.(string) + + // Children[1] could be criticality or value (both are optional) + // duck-type on whether this is a boolean + if _, ok := packet.Children[1].Value.(bool); ok { + packet.Children[1].Description = "Criticality" + Criticality = packet.Children[1].Value.(bool) + } else { + packet.Children[1].Description = "Control Value" + 
value = packet.Children[1] + } + + case 3: + packet.Children[0].Description = "Control Type (" + ControlTypeMap[ControlType] + ")" + ControlType = packet.Children[0].Value.(string) + + packet.Children[1].Description = "Criticality" + Criticality = packet.Children[1].Value.(bool) + + packet.Children[2].Description = "Control Value" + value = packet.Children[2] + + default: + // more than 3 children is invalid + return nil, fmt.Errorf("more than 3 children is invalid for controls") + } + + switch ControlType { + case ControlTypeManageDsaIT: + return NewControlManageDsaIT(Criticality), nil + case ControlTypePaging: + value.Description += " (Paging)" + c := new(ControlPaging) + if value.Value != nil { + valueChildren, err := ber.DecodePacketErr(value.Data.Bytes()) + if err != nil { + return nil, fmt.Errorf("failed to decode data bytes: %s", err) + } + value.Data.Truncate(0) + value.Value = nil + value.AppendChild(valueChildren) + } + value = value.Children[0] + value.Description = "Search Control Value" + value.Children[0].Description = "Paging Size" + value.Children[1].Description = "Cookie" + c.PagingSize = uint32(value.Children[0].Value.(int64)) + c.Cookie = value.Children[1].Data.Bytes() + value.Children[1].Value = c.Cookie + return c, nil + case ControlTypeBeheraPasswordPolicy: + value.Description += " (Password Policy - Behera)" + c := NewControlBeheraPasswordPolicy() + if value.Value != nil { + valueChildren, err := ber.DecodePacketErr(value.Data.Bytes()) + if err != nil { + return nil, fmt.Errorf("failed to decode data bytes: %s", err) + } + value.Data.Truncate(0) + value.Value = nil + value.AppendChild(valueChildren) + } + + sequence := value.Children[0] + + for _, child := range sequence.Children { + if child.Tag == 0 { + //Warning + warningPacket := child.Children[0] + packet, err := ber.DecodePacketErr(warningPacket.Data.Bytes()) + if err != nil { + return nil, fmt.Errorf("failed to decode data bytes: %s", err) + } + val, ok := packet.Value.(int64) + if ok 
{ + if warningPacket.Tag == 0 { + //timeBeforeExpiration + c.Expire = val + warningPacket.Value = c.Expire + } else if warningPacket.Tag == 1 { + //graceAuthNsRemaining + c.Grace = val + warningPacket.Value = c.Grace + } + } + } else if child.Tag == 1 { + // Error + packet, err := ber.DecodePacketErr(child.Data.Bytes()) + if err != nil { + return nil, fmt.Errorf("failed to decode data bytes: %s", err) + } + val, ok := packet.Value.(int8) + if !ok { + // what to do? + val = -1 + } + c.Error = val + child.Value = c.Error + c.ErrorString = BeheraPasswordPolicyErrorMap[c.Error] + } + } + return c, nil + case ControlTypeVChuPasswordMustChange: + c := &ControlVChuPasswordMustChange{MustChange: true} + return c, nil + case ControlTypeVChuPasswordWarning: + c := &ControlVChuPasswordWarning{Expire: -1} + expireStr := ber.DecodeString(value.Data.Bytes()) + + expire, err := strconv.ParseInt(expireStr, 10, 64) + if err != nil { + return nil, fmt.Errorf("failed to parse value as int: %s", err) + } + c.Expire = expire + value.Value = c.Expire + + return c, nil + case ControlTypeMicrosoftNotification: + return NewControlMicrosoftNotification(), nil + case ControlTypeMicrosoftShowDeleted: + return NewControlMicrosoftShowDeleted(), nil + default: + c := new(ControlString) + c.ControlType = ControlType + c.Criticality = Criticality + if value != nil { + c.ControlValue = value.Value.(string) + } + return c, nil + } +} + +// NewControlString returns a generic control +func NewControlString(controlType string, criticality bool, controlValue string) *ControlString { + return &ControlString{ + ControlType: controlType, + Criticality: criticality, + ControlValue: controlValue, + } +} + +// NewControlPaging returns a paging control +func NewControlPaging(pagingSize uint32) *ControlPaging { + return &ControlPaging{PagingSize: pagingSize} +} + +// NewControlBeheraPasswordPolicy returns a ControlBeheraPasswordPolicy +func NewControlBeheraPasswordPolicy() *ControlBeheraPasswordPolicy { + 
return &ControlBeheraPasswordPolicy{ + Expire: -1, + Grace: -1, + Error: -1, + } +} + +func encodeControls(controls []Control) *ber.Packet { + packet := ber.Encode(ber.ClassContext, ber.TypeConstructed, 0, nil, "Controls") + for _, control := range controls { + packet.AppendChild(control.Encode()) + } + return packet +} diff --git a/vendor/github.com/mattermost/ldap/debug.go b/vendor/github.com/mattermost/ldap/debug.go new file mode 100644 index 00000000..4af84a78 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/debug.go @@ -0,0 +1,49 @@ +package ldap + +import ( + "bytes" + + ber "github.com/go-asn1-ber/asn1-ber" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +type debugging struct { + logger mlog.LoggerIFace + levels []mlog.Level +} + +// Enable controls debugging mode. +func (debug *debugging) Enable(logger mlog.LoggerIFace, levels ...mlog.Level) { + *debug = debugging{ + logger: logger, + levels: levels, + } +} + +func (debug debugging) Enabled() bool { + return debug.logger != nil +} + +// Log writes debug output. +func (debug debugging) Log(msg string, fields ...mlog.Field) { + if debug.Enabled() { + debug.logger.LogM(debug.levels, msg, fields...) 
+ } +} + +type Packet ber.Packet + +func (p Packet) LogClone() any { + bp := ber.Packet(p) + var b bytes.Buffer + ber.WritePacket(&b, &bp) + return b.String() + +} + +func PacketToField(packet *ber.Packet) mlog.Field { + if packet == nil { + return mlog.Any("packet", nil) + } + return mlog.Any("packet", Packet(*packet)) +} diff --git a/vendor/github.com/mattermost/ldap/del.go b/vendor/github.com/mattermost/ldap/del.go new file mode 100644 index 00000000..858f0fce --- /dev/null +++ b/vendor/github.com/mattermost/ldap/del.go @@ -0,0 +1,65 @@ +// +// https://tools.ietf.org/html/rfc4511 +// +// DelRequest ::= [APPLICATION 10] LDAPDN + +package ldap + +import ( + ber "github.com/go-asn1-ber/asn1-ber" + + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// DelRequest implements an LDAP deletion request +type DelRequest struct { + // DN is the name of the directory entry to delete + DN string + // Controls hold optional controls to send with the request + Controls []Control +} + +func (req *DelRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypePrimitive, ApplicationDelRequest, req.DN, "Del Request") + pkt.Data.Write([]byte(req.DN)) + + envelope.AppendChild(pkt) + if len(req.Controls) > 0 { + envelope.AppendChild(encodeControls(req.Controls)) + } + + return nil +} + +// NewDelRequest creates a delete request for the given DN and controls +func NewDelRequest(DN string, Controls []Control) *DelRequest { + return &DelRequest{ + DN: DN, + Controls: Controls, + } +} + +// Del executes the given delete request +func (l *Conn) Del(delRequest *DelRequest) error { + msgCtx, err := l.doRequest(delRequest) + if err != nil { + return err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return err + } + + tag := packet.Children[1].Tag + if tag == ApplicationDelResponse { + err := GetLDAPError(packet) + if err != nil { + return err + } + } else { + l.Debug.Log("Unexpected 
Response tag", mlog.Uint("tag", tag)) + } + return nil +} diff --git a/vendor/github.com/mattermost/ldap/dn.go b/vendor/github.com/mattermost/ldap/dn.go new file mode 100644 index 00000000..9d32e7fa --- /dev/null +++ b/vendor/github.com/mattermost/ldap/dn.go @@ -0,0 +1,247 @@ +// File contains DN parsing functionality +// +// https://tools.ietf.org/html/rfc4514 +// +// distinguishedName = [ relativeDistinguishedName +// *( COMMA relativeDistinguishedName ) ] +// relativeDistinguishedName = attributeTypeAndValue +// *( PLUS attributeTypeAndValue ) +// attributeTypeAndValue = attributeType EQUALS attributeValue +// attributeType = descr / numericoid +// attributeValue = string / hexstring +// +// ; The following characters are to be escaped when they appear +// ; in the value to be encoded: ESC, one of , leading +// ; SHARP or SPACE, trailing SPACE, and NULL. +// string = [ ( leadchar / pair ) [ *( stringchar / pair ) +// ( trailchar / pair ) ] ] +// +// leadchar = LUTF1 / UTFMB +// LUTF1 = %x01-1F / %x21 / %x24-2A / %x2D-3A / +// %x3D / %x3F-5B / %x5D-7F +// +// trailchar = TUTF1 / UTFMB +// TUTF1 = %x01-1F / %x21 / %x23-2A / %x2D-3A / +// %x3D / %x3F-5B / %x5D-7F +// +// stringchar = SUTF1 / UTFMB +// SUTF1 = %x01-21 / %x23-2A / %x2D-3A / +// %x3D / %x3F-5B / %x5D-7F +// +// pair = ESC ( ESC / special / hexpair ) +// special = escaped / SPACE / SHARP / EQUALS +// escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE +// hexstring = SHARP 1*hexpair +// hexpair = HEX HEX +// +// where the productions , , , , +// , , , , , , , , +// , , and are defined in [RFC4512]. 
+// + +package ldap + +import ( + "bytes" + enchex "encoding/hex" + "errors" + "fmt" + "strings" + + "github.com/go-asn1-ber/asn1-ber" +) + +// AttributeTypeAndValue represents an attributeTypeAndValue from https://tools.ietf.org/html/rfc4514 +type AttributeTypeAndValue struct { + // Type is the attribute type + Type string + // Value is the attribute value + Value string +} + +// RelativeDN represents a relativeDistinguishedName from https://tools.ietf.org/html/rfc4514 +type RelativeDN struct { + Attributes []*AttributeTypeAndValue +} + +// DN represents a distinguishedName from https://tools.ietf.org/html/rfc4514 +type DN struct { + RDNs []*RelativeDN +} + +// ParseDN returns a distinguishedName or an error +func ParseDN(str string) (*DN, error) { + dn := new(DN) + dn.RDNs = make([]*RelativeDN, 0) + rdn := new(RelativeDN) + rdn.Attributes = make([]*AttributeTypeAndValue, 0) + buffer := bytes.Buffer{} + attribute := new(AttributeTypeAndValue) + escaping := false + + unescapedTrailingSpaces := 0 + stringFromBuffer := func() string { + s := buffer.String() + s = s[0 : len(s)-unescapedTrailingSpaces] + buffer.Reset() + unescapedTrailingSpaces = 0 + return s + } + + for i := 0; i < len(str); i++ { + char := str[i] + switch { + case escaping: + unescapedTrailingSpaces = 0 + escaping = false + switch char { + case ' ', '"', '#', '+', ',', ';', '<', '=', '>', '\\': + buffer.WriteByte(char) + continue + } + // Not a special character, assume hex encoded octet + if len(str) == i+1 { + return nil, errors.New("got corrupted escaped character") + } + + dst := []byte{0} + n, err := enchex.Decode([]byte(dst), []byte(str[i:i+2])) + if err != nil { + return nil, fmt.Errorf("failed to decode escaped character: %s", err) + } else if n != 1 { + return nil, fmt.Errorf("expected 1 byte when un-escaping, got %d", n) + } + buffer.WriteByte(dst[0]) + i++ + case char == '\\': + unescapedTrailingSpaces = 0 + escaping = true + case char == '=': + attribute.Type = stringFromBuffer() + // 
Special case: If the first character in the value is # the + // following data is BER encoded so we can just fast forward + // and decode. + if len(str) > i+1 && str[i+1] == '#' { + i += 2 + index := strings.IndexAny(str[i:], ",+") + data := str + if index > 0 { + data = str[i : i+index] + } else { + data = str[i:] + } + rawBER, err := enchex.DecodeString(data) + if err != nil { + return nil, fmt.Errorf("failed to decode BER encoding: %s", err) + } + packet, err := ber.DecodePacketErr(rawBER) + if err != nil { + return nil, fmt.Errorf("failed to decode BER packet: %s", err) + } + buffer.WriteString(packet.Data.String()) + i += len(data) - 1 + } + case char == ',' || char == '+': + // We're done with this RDN or value, push it + if len(attribute.Type) == 0 { + return nil, errors.New("incomplete type, value pair") + } + attribute.Value = stringFromBuffer() + rdn.Attributes = append(rdn.Attributes, attribute) + attribute = new(AttributeTypeAndValue) + if char == ',' { + dn.RDNs = append(dn.RDNs, rdn) + rdn = new(RelativeDN) + rdn.Attributes = make([]*AttributeTypeAndValue, 0) + } + case char == ' ' && buffer.Len() == 0: + // ignore unescaped leading spaces + continue + default: + if char == ' ' { + // Track unescaped spaces in case they are trailing and we need to remove them + unescapedTrailingSpaces++ + } else { + // Reset if we see a non-space char + unescapedTrailingSpaces = 0 + } + buffer.WriteByte(char) + } + } + if buffer.Len() > 0 { + if len(attribute.Type) == 0 { + return nil, errors.New("DN ended with incomplete type, value pair") + } + attribute.Value = stringFromBuffer() + rdn.Attributes = append(rdn.Attributes, attribute) + dn.RDNs = append(dn.RDNs, rdn) + } + return dn, nil +} + +// Equal returns true if the DNs are equal as defined by rfc4517 4.2.15 (distinguishedNameMatch). +// Returns true if they have the same number of relative distinguished names +// and corresponding relative distinguished names (by position) are the same. 
+func (d *DN) Equal(other *DN) bool { + if len(d.RDNs) != len(other.RDNs) { + return false + } + for i := range d.RDNs { + if !d.RDNs[i].Equal(other.RDNs[i]) { + return false + } + } + return true +} + +// AncestorOf returns true if the other DN consists of at least one RDN followed by all the RDNs of the current DN. +// "ou=widgets,o=acme.com" is an ancestor of "ou=sprockets,ou=widgets,o=acme.com" +// "ou=widgets,o=acme.com" is not an ancestor of "ou=sprockets,ou=widgets,o=foo.com" +// "ou=widgets,o=acme.com" is not an ancestor of "ou=widgets,o=acme.com" +func (d *DN) AncestorOf(other *DN) bool { + if len(d.RDNs) >= len(other.RDNs) { + return false + } + // Take the last `len(d.RDNs)` RDNs from the other DN to compare against + otherRDNs := other.RDNs[len(other.RDNs)-len(d.RDNs):] + for i := range d.RDNs { + if !d.RDNs[i].Equal(otherRDNs[i]) { + return false + } + } + return true +} + +// Equal returns true if the RelativeDNs are equal as defined by rfc4517 4.2.15 (distinguishedNameMatch). +// Relative distinguished names are the same if and only if they have the same number of AttributeTypeAndValues +// and each attribute of the first RDN is the same as the attribute of the second RDN with the same attribute type. +// The order of attributes is not significant. +// Case of attribute types is not significant. 
+func (r *RelativeDN) Equal(other *RelativeDN) bool { + if len(r.Attributes) != len(other.Attributes) { + return false + } + return r.hasAllAttributes(other.Attributes) && other.hasAllAttributes(r.Attributes) +} + +func (r *RelativeDN) hasAllAttributes(attrs []*AttributeTypeAndValue) bool { + for _, attr := range attrs { + found := false + for _, myattr := range r.Attributes { + if myattr.Equal(attr) { + found = true + break + } + } + if !found { + return false + } + } + return true +} + +// Equal returns true if the AttributeTypeAndValue is equivalent to the specified AttributeTypeAndValue +// Case of the attribute type is not significant +func (a *AttributeTypeAndValue) Equal(other *AttributeTypeAndValue) bool { + return strings.EqualFold(a.Type, other.Type) && a.Value == other.Value +} diff --git a/vendor/github.com/mattermost/ldap/doc.go b/vendor/github.com/mattermost/ldap/doc.go new file mode 100644 index 00000000..f20d39bc --- /dev/null +++ b/vendor/github.com/mattermost/ldap/doc.go @@ -0,0 +1,4 @@ +/* +Package ldap provides basic LDAP v3 functionality. 
+*/ +package ldap diff --git a/vendor/github.com/mattermost/ldap/error.go b/vendor/github.com/mattermost/ldap/error.go new file mode 100644 index 00000000..b1fda2d8 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/error.go @@ -0,0 +1,236 @@ +package ldap + +import ( + "fmt" + + ber "github.com/go-asn1-ber/asn1-ber" +) + +// LDAP Result Codes +const ( + LDAPResultSuccess = 0 + LDAPResultOperationsError = 1 + LDAPResultProtocolError = 2 + LDAPResultTimeLimitExceeded = 3 + LDAPResultSizeLimitExceeded = 4 + LDAPResultCompareFalse = 5 + LDAPResultCompareTrue = 6 + LDAPResultAuthMethodNotSupported = 7 + LDAPResultStrongAuthRequired = 8 + LDAPResultReferral = 10 + LDAPResultAdminLimitExceeded = 11 + LDAPResultUnavailableCriticalExtension = 12 + LDAPResultConfidentialityRequired = 13 + LDAPResultSaslBindInProgress = 14 + LDAPResultNoSuchAttribute = 16 + LDAPResultUndefinedAttributeType = 17 + LDAPResultInappropriateMatching = 18 + LDAPResultConstraintViolation = 19 + LDAPResultAttributeOrValueExists = 20 + LDAPResultInvalidAttributeSyntax = 21 + LDAPResultNoSuchObject = 32 + LDAPResultAliasProblem = 33 + LDAPResultInvalidDNSyntax = 34 + LDAPResultIsLeaf = 35 + LDAPResultAliasDereferencingProblem = 36 + LDAPResultInappropriateAuthentication = 48 + LDAPResultInvalidCredentials = 49 + LDAPResultInsufficientAccessRights = 50 + LDAPResultBusy = 51 + LDAPResultUnavailable = 52 + LDAPResultUnwillingToPerform = 53 + LDAPResultLoopDetect = 54 + LDAPResultSortControlMissing = 60 + LDAPResultOffsetRangeError = 61 + LDAPResultNamingViolation = 64 + LDAPResultObjectClassViolation = 65 + LDAPResultNotAllowedOnNonLeaf = 66 + LDAPResultNotAllowedOnRDN = 67 + LDAPResultEntryAlreadyExists = 68 + LDAPResultObjectClassModsProhibited = 69 + LDAPResultResultsTooLarge = 70 + LDAPResultAffectsMultipleDSAs = 71 + LDAPResultVirtualListViewErrorOrControlError = 76 + LDAPResultOther = 80 + LDAPResultServerDown = 81 + LDAPResultLocalError = 82 + LDAPResultEncodingError = 83 + 
LDAPResultDecodingError = 84 + LDAPResultTimeout = 85 + LDAPResultAuthUnknown = 86 + LDAPResultFilterError = 87 + LDAPResultUserCanceled = 88 + LDAPResultParamError = 89 + LDAPResultNoMemory = 90 + LDAPResultConnectError = 91 + LDAPResultNotSupported = 92 + LDAPResultControlNotFound = 93 + LDAPResultNoResultsReturned = 94 + LDAPResultMoreResultsToReturn = 95 + LDAPResultClientLoop = 96 + LDAPResultReferralLimitExceeded = 97 + LDAPResultInvalidResponse = 100 + LDAPResultAmbiguousResponse = 101 + LDAPResultTLSNotSupported = 112 + LDAPResultIntermediateResponse = 113 + LDAPResultUnknownType = 114 + LDAPResultCanceled = 118 + LDAPResultNoSuchOperation = 119 + LDAPResultTooLate = 120 + LDAPResultCannotCancel = 121 + LDAPResultAssertionFailed = 122 + LDAPResultAuthorizationDenied = 123 + LDAPResultSyncRefreshRequired = 4096 + + ErrorNetwork = 200 + ErrorFilterCompile = 201 + ErrorFilterDecompile = 202 + ErrorDebugging = 203 + ErrorUnexpectedMessage = 204 + ErrorUnexpectedResponse = 205 + ErrorEmptyPassword = 206 +) + +// LDAPResultCodeMap contains string descriptions for LDAP error codes +var LDAPResultCodeMap = map[uint16]string{ + LDAPResultSuccess: "Success", + LDAPResultOperationsError: "Operations Error", + LDAPResultProtocolError: "Protocol Error", + LDAPResultTimeLimitExceeded: "Time Limit Exceeded", + LDAPResultSizeLimitExceeded: "Size Limit Exceeded", + LDAPResultCompareFalse: "Compare False", + LDAPResultCompareTrue: "Compare True", + LDAPResultAuthMethodNotSupported: "Auth Method Not Supported", + LDAPResultStrongAuthRequired: "Strong Auth Required", + LDAPResultReferral: "Referral", + LDAPResultAdminLimitExceeded: "Admin Limit Exceeded", + LDAPResultUnavailableCriticalExtension: "Unavailable Critical Extension", + LDAPResultConfidentialityRequired: "Confidentiality Required", + LDAPResultSaslBindInProgress: "Sasl Bind In Progress", + LDAPResultNoSuchAttribute: "No Such Attribute", + LDAPResultUndefinedAttributeType: "Undefined Attribute Type", + 
LDAPResultInappropriateMatching: "Inappropriate Matching", + LDAPResultConstraintViolation: "Constraint Violation", + LDAPResultAttributeOrValueExists: "Attribute Or Value Exists", + LDAPResultInvalidAttributeSyntax: "Invalid Attribute Syntax", + LDAPResultNoSuchObject: "No Such Object", + LDAPResultAliasProblem: "Alias Problem", + LDAPResultInvalidDNSyntax: "Invalid DN Syntax", + LDAPResultIsLeaf: "Is Leaf", + LDAPResultAliasDereferencingProblem: "Alias Dereferencing Problem", + LDAPResultInappropriateAuthentication: "Inappropriate Authentication", + LDAPResultInvalidCredentials: "Invalid Credentials", + LDAPResultInsufficientAccessRights: "Insufficient Access Rights", + LDAPResultBusy: "Busy", + LDAPResultUnavailable: "Unavailable", + LDAPResultUnwillingToPerform: "Unwilling To Perform", + LDAPResultLoopDetect: "Loop Detect", + LDAPResultSortControlMissing: "Sort Control Missing", + LDAPResultOffsetRangeError: "Result Offset Range Error", + LDAPResultNamingViolation: "Naming Violation", + LDAPResultObjectClassViolation: "Object Class Violation", + LDAPResultResultsTooLarge: "Results Too Large", + LDAPResultNotAllowedOnNonLeaf: "Not Allowed On Non Leaf", + LDAPResultNotAllowedOnRDN: "Not Allowed On RDN", + LDAPResultEntryAlreadyExists: "Entry Already Exists", + LDAPResultObjectClassModsProhibited: "Object Class Mods Prohibited", + LDAPResultAffectsMultipleDSAs: "Affects Multiple DSAs", + LDAPResultVirtualListViewErrorOrControlError: "Failed because of a problem related to the virtual list view", + LDAPResultOther: "Other", + LDAPResultServerDown: "Cannot establish a connection", + LDAPResultLocalError: "An error occurred", + LDAPResultEncodingError: "LDAP encountered an error while encoding", + LDAPResultDecodingError: "LDAP encountered an error while decoding", + LDAPResultTimeout: "LDAP timeout while waiting for a response from the server", + LDAPResultAuthUnknown: "The auth method requested in a bind request is unknown", + LDAPResultFilterError: "An error 
occurred while encoding the given search filter", + LDAPResultUserCanceled: "The user canceled the operation", + LDAPResultParamError: "An invalid parameter was specified", + LDAPResultNoMemory: "Out of memory error", + LDAPResultConnectError: "A connection to the server could not be established", + LDAPResultNotSupported: "An attempt has been made to use a feature not supported LDAP", + LDAPResultControlNotFound: "The controls required to perform the requested operation were not found", + LDAPResultNoResultsReturned: "No results were returned from the server", + LDAPResultMoreResultsToReturn: "There are more results in the chain of results", + LDAPResultClientLoop: "A loop has been detected. For example when following referrals", + LDAPResultReferralLimitExceeded: "The referral hop limit has been exceeded", + LDAPResultCanceled: "Operation was canceled", + LDAPResultNoSuchOperation: "Server has no knowledge of the operation requested for cancellation", + LDAPResultTooLate: "Too late to cancel the outstanding operation", + LDAPResultCannotCancel: "The identified operation does not support cancellation or the cancel operation cannot be performed", + LDAPResultAssertionFailed: "An assertion control given in the LDAP operation evaluated to false causing the operation to not be performed", + LDAPResultSyncRefreshRequired: "Refresh Required", + LDAPResultInvalidResponse: "Invalid Response", + LDAPResultAmbiguousResponse: "Ambiguous Response", + LDAPResultTLSNotSupported: "Tls Not Supported", + LDAPResultIntermediateResponse: "Intermediate Response", + LDAPResultUnknownType: "Unknown Type", + LDAPResultAuthorizationDenied: "Authorization Denied", + + ErrorNetwork: "Network Error", + ErrorFilterCompile: "Filter Compile Error", + ErrorFilterDecompile: "Filter Decompile Error", + ErrorDebugging: "Debugging Error", + ErrorUnexpectedMessage: "Unexpected Message", + ErrorUnexpectedResponse: "Unexpected Response", + ErrorEmptyPassword: "Empty password not allowed by the 
client", +} + +// Error holds LDAP error information +type Error struct { + // Err is the underlying error + Err error + // ResultCode is the LDAP error code + ResultCode uint16 + // MatchedDN is the matchedDN returned if any + MatchedDN string +} + +func (e *Error) Error() string { + return fmt.Sprintf("LDAP Result Code %d %q: %s", e.ResultCode, LDAPResultCodeMap[e.ResultCode], e.Err.Error()) +} + +// GetLDAPError creates an Error out of a BER packet representing a LDAPResult +// The return is an error object. It can be casted to a Error structure. +// This function returns nil if resultCode in the LDAPResult sequence is success(0). +func GetLDAPError(packet *ber.Packet) error { + if packet == nil { + return &Error{ResultCode: ErrorUnexpectedResponse, Err: fmt.Errorf("Empty packet")} + } + + if len(packet.Children) >= 2 { + response := packet.Children[1] + if response == nil { + return &Error{ResultCode: ErrorUnexpectedResponse, Err: fmt.Errorf("Empty response in packet")} + } + if response.ClassType == ber.ClassApplication && response.TagType == ber.TypeConstructed && len(response.Children) >= 3 { + resultCode := uint16(response.Children[0].Value.(int64)) + if resultCode == 0 { // No error + return nil + } + return &Error{ResultCode: resultCode, MatchedDN: response.Children[1].Value.(string), + Err: fmt.Errorf("%s", response.Children[2].Value.(string))} + } + } + + return &Error{ResultCode: ErrorNetwork, Err: fmt.Errorf("Invalid packet format")} +} + +// NewError creates an LDAP error with the given code and underlying error +func NewError(resultCode uint16, err error) error { + return &Error{ResultCode: resultCode, Err: err} +} + +// IsErrorWithCode returns true if the given error is an LDAP error with the given result code +func IsErrorWithCode(err error, desiredResultCode uint16) bool { + if err == nil { + return false + } + + serverError, ok := err.(*Error) + if !ok { + return false + } + + return serverError.ResultCode == desiredResultCode +} diff --git 
a/vendor/github.com/mattermost/ldap/filter.go b/vendor/github.com/mattermost/ldap/filter.go new file mode 100644 index 00000000..a3875506 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/filter.go @@ -0,0 +1,465 @@ +package ldap + +import ( + "bytes" + hexpac "encoding/hex" + "errors" + "fmt" + "strings" + "unicode/utf8" + + "github.com/go-asn1-ber/asn1-ber" +) + +// Filter choices +const ( + FilterAnd = 0 + FilterOr = 1 + FilterNot = 2 + FilterEqualityMatch = 3 + FilterSubstrings = 4 + FilterGreaterOrEqual = 5 + FilterLessOrEqual = 6 + FilterPresent = 7 + FilterApproxMatch = 8 + FilterExtensibleMatch = 9 +) + +// FilterMap contains human readable descriptions of Filter choices +var FilterMap = map[uint64]string{ + FilterAnd: "And", + FilterOr: "Or", + FilterNot: "Not", + FilterEqualityMatch: "Equality Match", + FilterSubstrings: "Substrings", + FilterGreaterOrEqual: "Greater Or Equal", + FilterLessOrEqual: "Less Or Equal", + FilterPresent: "Present", + FilterApproxMatch: "Approx Match", + FilterExtensibleMatch: "Extensible Match", +} + +// SubstringFilter options +const ( + FilterSubstringsInitial = 0 + FilterSubstringsAny = 1 + FilterSubstringsFinal = 2 +) + +// FilterSubstringsMap contains human readable descriptions of SubstringFilter choices +var FilterSubstringsMap = map[uint64]string{ + FilterSubstringsInitial: "Substrings Initial", + FilterSubstringsAny: "Substrings Any", + FilterSubstringsFinal: "Substrings Final", +} + +// MatchingRuleAssertion choices +const ( + MatchingRuleAssertionMatchingRule = 1 + MatchingRuleAssertionType = 2 + MatchingRuleAssertionMatchValue = 3 + MatchingRuleAssertionDNAttributes = 4 +) + +// MatchingRuleAssertionMap contains human readable descriptions of MatchingRuleAssertion choices +var MatchingRuleAssertionMap = map[uint64]string{ + MatchingRuleAssertionMatchingRule: "Matching Rule Assertion Matching Rule", + MatchingRuleAssertionType: "Matching Rule Assertion Type", + MatchingRuleAssertionMatchValue: "Matching Rule 
Assertion Match Value", + MatchingRuleAssertionDNAttributes: "Matching Rule Assertion DN Attributes", +} + +// CompileFilter converts a string representation of a filter into a BER-encoded packet +func CompileFilter(filter string) (*ber.Packet, error) { + if len(filter) == 0 || filter[0] != '(' { + return nil, NewError(ErrorFilterCompile, errors.New("ldap: filter does not start with an '('")) + } + packet, pos, err := compileFilter(filter, 1) + if err != nil { + return nil, err + } + switch { + case pos > len(filter): + return nil, NewError(ErrorFilterCompile, errors.New("ldap: unexpected end of filter")) + case pos < len(filter): + return nil, NewError(ErrorFilterCompile, errors.New("ldap: finished compiling filter with extra at end: "+fmt.Sprint(filter[pos:]))) + } + return packet, nil +} + +// DecompileFilter converts a packet representation of a filter into a string representation +func DecompileFilter(packet *ber.Packet) (ret string, err error) { + defer func() { + if r := recover(); r != nil { + err = NewError(ErrorFilterDecompile, errors.New("ldap: error decompiling filter")) + } + }() + ret = "(" + err = nil + childStr := "" + + switch packet.Tag { + case FilterAnd: + ret += "&" + for _, child := range packet.Children { + childStr, err = DecompileFilter(child) + if err != nil { + return + } + ret += childStr + } + case FilterOr: + ret += "|" + for _, child := range packet.Children { + childStr, err = DecompileFilter(child) + if err != nil { + return + } + ret += childStr + } + case FilterNot: + ret += "!" 
+ childStr, err = DecompileFilter(packet.Children[0]) + if err != nil { + return + } + ret += childStr + + case FilterSubstrings: + ret += ber.DecodeString(packet.Children[0].Data.Bytes()) + ret += "=" + for i, child := range packet.Children[1].Children { + if i == 0 && child.Tag != FilterSubstringsInitial { + ret += "*" + } + ret += EscapeFilter(ber.DecodeString(child.Data.Bytes())) + if child.Tag != FilterSubstringsFinal { + ret += "*" + } + } + case FilterEqualityMatch: + ret += ber.DecodeString(packet.Children[0].Data.Bytes()) + ret += "=" + ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes())) + case FilterGreaterOrEqual: + ret += ber.DecodeString(packet.Children[0].Data.Bytes()) + ret += ">=" + ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes())) + case FilterLessOrEqual: + ret += ber.DecodeString(packet.Children[0].Data.Bytes()) + ret += "<=" + ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes())) + case FilterPresent: + ret += ber.DecodeString(packet.Data.Bytes()) + ret += "=*" + case FilterApproxMatch: + ret += ber.DecodeString(packet.Children[0].Data.Bytes()) + ret += "~=" + ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes())) + case FilterExtensibleMatch: + attr := "" + dnAttributes := false + matchingRule := "" + value := "" + + for _, child := range packet.Children { + switch child.Tag { + case MatchingRuleAssertionMatchingRule: + matchingRule = ber.DecodeString(child.Data.Bytes()) + case MatchingRuleAssertionType: + attr = ber.DecodeString(child.Data.Bytes()) + case MatchingRuleAssertionMatchValue: + value = ber.DecodeString(child.Data.Bytes()) + case MatchingRuleAssertionDNAttributes: + dnAttributes = child.Value.(bool) + } + } + + if len(attr) > 0 { + ret += attr + } + if dnAttributes { + ret += ":dn" + } + if len(matchingRule) > 0 { + ret += ":" + ret += matchingRule + } + ret += ":=" + ret += EscapeFilter(value) + } + + ret += ")" + return +} + +func compileFilterSet(filter 
string, pos int, parent *ber.Packet) (int, error) { + for pos < len(filter) && filter[pos] == '(' { + child, newPos, err := compileFilter(filter, pos+1) + if err != nil { + return pos, err + } + pos = newPos + parent.AppendChild(child) + } + if pos == len(filter) { + return pos, NewError(ErrorFilterCompile, errors.New("ldap: unexpected end of filter")) + } + + return pos + 1, nil +} + +func compileFilter(filter string, pos int) (*ber.Packet, int, error) { + var ( + packet *ber.Packet + err error + ) + + defer func() { + if r := recover(); r != nil { + err = NewError(ErrorFilterCompile, errors.New("ldap: error compiling filter")) + } + }() + newPos := pos + + currentRune, currentWidth := utf8.DecodeRuneInString(filter[newPos:]) + + switch currentRune { + case utf8.RuneError: + return nil, 0, NewError(ErrorFilterCompile, fmt.Errorf("ldap: error reading rune at position %d", newPos)) + case '(': + packet, newPos, err = compileFilter(filter, pos+currentWidth) + newPos++ + return packet, newPos, err + case '&': + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterAnd, nil, FilterMap[FilterAnd]) + newPos, err = compileFilterSet(filter, pos+currentWidth, packet) + return packet, newPos, err + case '|': + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterOr, nil, FilterMap[FilterOr]) + newPos, err = compileFilterSet(filter, pos+currentWidth, packet) + return packet, newPos, err + case '!': + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterNot, nil, FilterMap[FilterNot]) + var child *ber.Packet + child, newPos, err = compileFilter(filter, pos+currentWidth) + packet.AppendChild(child) + return packet, newPos, err + default: + const ( + stateReadingAttr = 0 + stateReadingExtensibleMatchingRule = 1 + stateReadingCondition = 2 + ) + + state := stateReadingAttr + + attribute := "" + extensibleDNAttributes := false + extensibleMatchingRule := "" + condition := "" + + for newPos < len(filter) { + remainingFilter := filter[newPos:] + 
currentRune, currentWidth = utf8.DecodeRuneInString(remainingFilter) + if currentRune == ')' { + break + } + if currentRune == utf8.RuneError { + return packet, newPos, NewError(ErrorFilterCompile, fmt.Errorf("ldap: error reading rune at position %d", newPos)) + } + + switch state { + case stateReadingAttr: + switch { + // Extensible rule, with only DN-matching + case currentRune == ':' && strings.HasPrefix(remainingFilter, ":dn:="): + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch]) + extensibleDNAttributes = true + state = stateReadingCondition + newPos += 5 + + // Extensible rule, with DN-matching and a matching OID + case currentRune == ':' && strings.HasPrefix(remainingFilter, ":dn:"): + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch]) + extensibleDNAttributes = true + state = stateReadingExtensibleMatchingRule + newPos += 4 + + // Extensible rule, with attr only + case currentRune == ':' && strings.HasPrefix(remainingFilter, ":="): + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch]) + state = stateReadingCondition + newPos += 2 + + // Extensible rule, with no DN attribute matching + case currentRune == ':': + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch]) + state = stateReadingExtensibleMatchingRule + newPos++ + + // Equality condition + case currentRune == '=': + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterEqualityMatch, nil, FilterMap[FilterEqualityMatch]) + state = stateReadingCondition + newPos++ + + // Greater-than or equal + case currentRune == '>' && strings.HasPrefix(remainingFilter, ">="): + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterGreaterOrEqual, nil, FilterMap[FilterGreaterOrEqual]) + state = stateReadingCondition + 
newPos += 2 + + // Less-than or equal + case currentRune == '<' && strings.HasPrefix(remainingFilter, "<="): + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterLessOrEqual, nil, FilterMap[FilterLessOrEqual]) + state = stateReadingCondition + newPos += 2 + + // Approx + case currentRune == '~' && strings.HasPrefix(remainingFilter, "~="): + packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterApproxMatch, nil, FilterMap[FilterApproxMatch]) + state = stateReadingCondition + newPos += 2 + + // Still reading the attribute name + default: + attribute += fmt.Sprintf("%c", currentRune) + newPos += currentWidth + } + + case stateReadingExtensibleMatchingRule: + switch { + + // Matching rule OID is done + case currentRune == ':' && strings.HasPrefix(remainingFilter, ":="): + state = stateReadingCondition + newPos += 2 + + // Still reading the matching rule oid + default: + extensibleMatchingRule += fmt.Sprintf("%c", currentRune) + newPos += currentWidth + } + + case stateReadingCondition: + // append to the condition + condition += fmt.Sprintf("%c", currentRune) + newPos += currentWidth + } + } + + if newPos == len(filter) { + err = NewError(ErrorFilterCompile, errors.New("ldap: unexpected end of filter")) + return packet, newPos, err + } + if packet == nil { + err = NewError(ErrorFilterCompile, errors.New("ldap: error parsing filter")) + return packet, newPos, err + } + + switch { + case packet.Tag == FilterExtensibleMatch: + // MatchingRuleAssertion ::= SEQUENCE { + // matchingRule [1] MatchingRuleID OPTIONAL, + // type [2] AttributeDescription OPTIONAL, + // matchValue [3] AssertionValue, + // dnAttributes [4] BOOLEAN DEFAULT FALSE + // } + + // Include the matching rule oid, if specified + if len(extensibleMatchingRule) > 0 { + packet.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionMatchingRule, extensibleMatchingRule, MatchingRuleAssertionMap[MatchingRuleAssertionMatchingRule])) + } + + // Include the 
attribute, if specified + if len(attribute) > 0 { + packet.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionType, attribute, MatchingRuleAssertionMap[MatchingRuleAssertionType])) + } + + // Add the value (only required child) + encodedString, encodeErr := escapedStringToEncodedBytes(condition) + if encodeErr != nil { + return packet, newPos, encodeErr + } + packet.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionMatchValue, encodedString, MatchingRuleAssertionMap[MatchingRuleAssertionMatchValue])) + + // Defaults to false, so only include in the sequence if true + if extensibleDNAttributes { + packet.AppendChild(ber.NewBoolean(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionDNAttributes, extensibleDNAttributes, MatchingRuleAssertionMap[MatchingRuleAssertionDNAttributes])) + } + + case packet.Tag == FilterEqualityMatch && condition == "*": + packet = ber.NewString(ber.ClassContext, ber.TypePrimitive, FilterPresent, attribute, FilterMap[FilterPresent]) + case packet.Tag == FilterEqualityMatch && strings.Contains(condition, "*"): + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, attribute, "Attribute")) + packet.Tag = FilterSubstrings + packet.Description = FilterMap[uint64(packet.Tag)] + seq := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Substrings") + parts := strings.Split(condition, "*") + for i, part := range parts { + if part == "" { + continue + } + var tag ber.Tag + switch i { + case 0: + tag = FilterSubstringsInitial + case len(parts) - 1: + tag = FilterSubstringsFinal + default: + tag = FilterSubstringsAny + } + encodedString, encodeErr := escapedStringToEncodedBytes(part) + if encodeErr != nil { + return packet, newPos, encodeErr + } + seq.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, tag, encodedString, FilterSubstringsMap[uint64(tag)])) + } + packet.AppendChild(seq) + default: + 
encodedString, encodeErr := escapedStringToEncodedBytes(condition) + if encodeErr != nil { + return packet, newPos, encodeErr + } + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, attribute, "Attribute")) + packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, encodedString, "Condition")) + } + + newPos += currentWidth + return packet, newPos, err + } +} + +// Convert from "ABC\xx\xx\xx" form to literal bytes for transport +func escapedStringToEncodedBytes(escapedString string) (string, error) { + var buffer bytes.Buffer + i := 0 + for i < len(escapedString) { + currentRune, currentWidth := utf8.DecodeRuneInString(escapedString[i:]) + if currentRune == utf8.RuneError { + return "", NewError(ErrorFilterCompile, fmt.Errorf("ldap: error reading rune at position %d", i)) + } + + // Check for escaped hex characters and convert them to their literal value for transport. + if currentRune == '\\' { + // http://tools.ietf.org/search/rfc4515 + // \ (%x5C) is not a valid character unless it is followed by two HEX characters due to not + // being a member of UTF1SUBSET. + if i+2 > len(escapedString) { + return "", NewError(ErrorFilterCompile, errors.New("ldap: missing characters for escape in filter")) + } + escByte, decodeErr := hexpac.DecodeString(escapedString[i+1 : i+3]) + if decodeErr != nil { + return "", NewError(ErrorFilterCompile, errors.New("ldap: invalid characters for escape in filter")) + } + buffer.WriteByte(escByte[0]) + i += 2 // +1 from end of loop, so 3 total for \xx. 
+ } else { + buffer.WriteRune(currentRune) + } + + i += currentWidth + } + return buffer.String(), nil +} diff --git a/vendor/github.com/mattermost/ldap/ldap.go b/vendor/github.com/mattermost/ldap/ldap.go new file mode 100644 index 00000000..7dbc951a --- /dev/null +++ b/vendor/github.com/mattermost/ldap/ldap.go @@ -0,0 +1,345 @@ +package ldap + +import ( + "fmt" + "io/ioutil" + "os" + + ber "github.com/go-asn1-ber/asn1-ber" +) + +// LDAP Application Codes +const ( + ApplicationBindRequest = 0 + ApplicationBindResponse = 1 + ApplicationUnbindRequest = 2 + ApplicationSearchRequest = 3 + ApplicationSearchResultEntry = 4 + ApplicationSearchResultDone = 5 + ApplicationModifyRequest = 6 + ApplicationModifyResponse = 7 + ApplicationAddRequest = 8 + ApplicationAddResponse = 9 + ApplicationDelRequest = 10 + ApplicationDelResponse = 11 + ApplicationModifyDNRequest = 12 + ApplicationModifyDNResponse = 13 + ApplicationCompareRequest = 14 + ApplicationCompareResponse = 15 + ApplicationAbandonRequest = 16 + ApplicationSearchResultReference = 19 + ApplicationExtendedRequest = 23 + ApplicationExtendedResponse = 24 +) + +// ApplicationMap contains human readable descriptions of LDAP Application Codes +var ApplicationMap = map[uint8]string{ + ApplicationBindRequest: "Bind Request", + ApplicationBindResponse: "Bind Response", + ApplicationUnbindRequest: "Unbind Request", + ApplicationSearchRequest: "Search Request", + ApplicationSearchResultEntry: "Search Result Entry", + ApplicationSearchResultDone: "Search Result Done", + ApplicationModifyRequest: "Modify Request", + ApplicationModifyResponse: "Modify Response", + ApplicationAddRequest: "Add Request", + ApplicationAddResponse: "Add Response", + ApplicationDelRequest: "Del Request", + ApplicationDelResponse: "Del Response", + ApplicationModifyDNRequest: "Modify DN Request", + ApplicationModifyDNResponse: "Modify DN Response", + ApplicationCompareRequest: "Compare Request", + ApplicationCompareResponse: "Compare Response", + 
ApplicationAbandonRequest: "Abandon Request", + ApplicationSearchResultReference: "Search Result Reference", + ApplicationExtendedRequest: "Extended Request", + ApplicationExtendedResponse: "Extended Response", +} + +// Ldap Behera Password Policy Draft 10 (https://tools.ietf.org/html/draft-behera-ldap-password-policy-10) +const ( + BeheraPasswordExpired = 0 + BeheraAccountLocked = 1 + BeheraChangeAfterReset = 2 + BeheraPasswordModNotAllowed = 3 + BeheraMustSupplyOldPassword = 4 + BeheraInsufficientPasswordQuality = 5 + BeheraPasswordTooShort = 6 + BeheraPasswordTooYoung = 7 + BeheraPasswordInHistory = 8 +) + +// BeheraPasswordPolicyErrorMap contains human readable descriptions of Behera Password Policy error codes +var BeheraPasswordPolicyErrorMap = map[int8]string{ + BeheraPasswordExpired: "Password expired", + BeheraAccountLocked: "Account locked", + BeheraChangeAfterReset: "Password must be changed", + BeheraPasswordModNotAllowed: "Policy prevents password modification", + BeheraMustSupplyOldPassword: "Policy requires old password in order to change password", + BeheraInsufficientPasswordQuality: "Password fails quality checks", + BeheraPasswordTooShort: "Password is too short for policy", + BeheraPasswordTooYoung: "Password has been changed too recently", + BeheraPasswordInHistory: "New password is in list of old passwords", +} + +// Adds descriptions to an LDAP Response packet for debugging +func addLDAPDescriptions(packet *ber.Packet) (err error) { + defer func() { + if r := recover(); r != nil { + err = NewError(ErrorDebugging, fmt.Errorf("ldap: cannot process packet to add descriptions: %s", r)) + } + }() + packet.Description = "LDAP Response" + packet.Children[0].Description = "Message ID" + + application := uint8(packet.Children[1].Tag) + packet.Children[1].Description = ApplicationMap[application] + + switch application { + case ApplicationBindRequest: + err = addRequestDescriptions(packet) + case ApplicationBindResponse: + err = 
addDefaultLDAPResponseDescriptions(packet) + case ApplicationUnbindRequest: + err = addRequestDescriptions(packet) + case ApplicationSearchRequest: + err = addRequestDescriptions(packet) + case ApplicationSearchResultEntry: + packet.Children[1].Children[0].Description = "Object Name" + packet.Children[1].Children[1].Description = "Attributes" + for _, child := range packet.Children[1].Children[1].Children { + child.Description = "Attribute" + child.Children[0].Description = "Attribute Name" + child.Children[1].Description = "Attribute Values" + for _, grandchild := range child.Children[1].Children { + grandchild.Description = "Attribute Value" + } + } + if len(packet.Children) == 3 { + err = addControlDescriptions(packet.Children[2]) + } + case ApplicationSearchResultDone: + err = addDefaultLDAPResponseDescriptions(packet) + case ApplicationModifyRequest: + err = addRequestDescriptions(packet) + case ApplicationModifyResponse: + case ApplicationAddRequest: + err = addRequestDescriptions(packet) + case ApplicationAddResponse: + case ApplicationDelRequest: + err = addRequestDescriptions(packet) + case ApplicationDelResponse: + case ApplicationModifyDNRequest: + err = addRequestDescriptions(packet) + case ApplicationModifyDNResponse: + case ApplicationCompareRequest: + err = addRequestDescriptions(packet) + case ApplicationCompareResponse: + case ApplicationAbandonRequest: + err = addRequestDescriptions(packet) + case ApplicationSearchResultReference: + case ApplicationExtendedRequest: + err = addRequestDescriptions(packet) + case ApplicationExtendedResponse: + } + + return err +} + +func addControlDescriptions(packet *ber.Packet) error { + packet.Description = "Controls" + for _, child := range packet.Children { + var value *ber.Packet + controlType := "" + child.Description = "Control" + switch len(child.Children) { + case 0: + // at least one child is required for control type + return fmt.Errorf("at least one child is required for control type") + + case 1: + // 
just type, no criticality or value + controlType = child.Children[0].Value.(string) + child.Children[0].Description = "Control Type (" + ControlTypeMap[controlType] + ")" + + case 2: + controlType = child.Children[0].Value.(string) + child.Children[0].Description = "Control Type (" + ControlTypeMap[controlType] + ")" + // Children[1] could be criticality or value (both are optional) + // duck-type on whether this is a boolean + if _, ok := child.Children[1].Value.(bool); ok { + child.Children[1].Description = "Criticality" + } else { + child.Children[1].Description = "Control Value" + value = child.Children[1] + } + + case 3: + // criticality and value present + controlType = child.Children[0].Value.(string) + child.Children[0].Description = "Control Type (" + ControlTypeMap[controlType] + ")" + child.Children[1].Description = "Criticality" + child.Children[2].Description = "Control Value" + value = child.Children[2] + + default: + // more than 3 children is invalid + return fmt.Errorf("more than 3 children for control packet found") + } + + if value == nil { + continue + } + switch controlType { + case ControlTypePaging: + value.Description += " (Paging)" + if value.Value != nil { + valueChildren, err := ber.DecodePacketErr(value.Data.Bytes()) + if err != nil { + return fmt.Errorf("failed to decode data bytes: %s", err) + } + value.Data.Truncate(0) + value.Value = nil + valueChildren.Children[1].Value = valueChildren.Children[1].Data.Bytes() + value.AppendChild(valueChildren) + } + value.Children[0].Description = "Real Search Control Value" + value.Children[0].Children[0].Description = "Paging Size" + value.Children[0].Children[1].Description = "Cookie" + + case ControlTypeBeheraPasswordPolicy: + value.Description += " (Password Policy - Behera Draft)" + if value.Value != nil { + valueChildren, err := ber.DecodePacketErr(value.Data.Bytes()) + if err != nil { + return fmt.Errorf("failed to decode data bytes: %s", err) + } + value.Data.Truncate(0) + value.Value = 
nil + value.AppendChild(valueChildren) + } + sequence := value.Children[0] + for _, child := range sequence.Children { + if child.Tag == 0 { + //Warning + warningPacket := child.Children[0] + packet, err := ber.DecodePacketErr(warningPacket.Data.Bytes()) + if err != nil { + return fmt.Errorf("failed to decode data bytes: %s", err) + } + val, ok := packet.Value.(int64) + if ok { + if warningPacket.Tag == 0 { + //timeBeforeExpiration + value.Description += " (TimeBeforeExpiration)" + warningPacket.Value = val + } else if warningPacket.Tag == 1 { + //graceAuthNsRemaining + value.Description += " (GraceAuthNsRemaining)" + warningPacket.Value = val + } + } + } else if child.Tag == 1 { + // Error + packet, err := ber.DecodePacketErr(child.Data.Bytes()) + if err != nil { + return fmt.Errorf("failed to decode data bytes: %s", err) + } + val, ok := packet.Value.(int8) + if !ok { + val = -1 + } + child.Description = "Error" + child.Value = val + } + } + } + } + return nil +} + +func addRequestDescriptions(packet *ber.Packet) error { + packet.Description = "LDAP Request" + packet.Children[0].Description = "Message ID" + packet.Children[1].Description = ApplicationMap[uint8(packet.Children[1].Tag)] + if len(packet.Children) == 3 { + return addControlDescriptions(packet.Children[2]) + } + return nil +} + +func addDefaultLDAPResponseDescriptions(packet *ber.Packet) error { + resultCode := uint16(LDAPResultSuccess) + matchedDN := "" + description := "Success" + if err := GetLDAPError(packet); err != nil { + resultCode = err.(*Error).ResultCode + matchedDN = err.(*Error).MatchedDN + description = "Error Message" + } + + packet.Children[1].Children[0].Description = "Result Code (" + LDAPResultCodeMap[resultCode] + ")" + packet.Children[1].Children[1].Description = "Matched DN (" + matchedDN + ")" + packet.Children[1].Children[2].Description = description + if len(packet.Children[1].Children) > 3 { + packet.Children[1].Children[3].Description = "Referral" + } + if 
len(packet.Children) == 3 { + return addControlDescriptions(packet.Children[2]) + } + return nil +} + +// DebugBinaryFile reads and prints packets from the given filename +func DebugBinaryFile(fileName string) error { + file, err := ioutil.ReadFile(fileName) + if err != nil { + return NewError(ErrorDebugging, err) + } + ber.PrintBytes(os.Stdout, file, "") + packet, err := ber.DecodePacketErr(file) + if err != nil { + return fmt.Errorf("failed to decode packet: %s", err) + } + if err := addLDAPDescriptions(packet); err != nil { + return err + } + ber.PrintPacket(packet) + + return nil +} + +var hex = "0123456789abcdef" + +func mustEscape(c byte) bool { + return c > 0x7f || c == '(' || c == ')' || c == '\\' || c == '*' || c == 0 +} + +// EscapeFilter escapes from the provided LDAP filter string the special +// characters in the set `()*\` and those out of the range 0 < c < 0x80, +// as defined in RFC4515. +func EscapeFilter(filter string) string { + escape := 0 + for i := 0; i < len(filter); i++ { + if mustEscape(filter[i]) { + escape++ + } + } + if escape == 0 { + return filter + } + buf := make([]byte, len(filter)+escape*2) + for i, j := 0, 0; i < len(filter); i++ { + c := filter[i] + if mustEscape(c) { + buf[j+0] = '\\' + buf[j+1] = hex[c>>4] + buf[j+2] = hex[c&0xf] + j += 3 + } else { + buf[j] = c + j++ + } + } + return string(buf) +} diff --git a/vendor/github.com/mattermost/ldap/moddn.go b/vendor/github.com/mattermost/ldap/moddn.go new file mode 100644 index 00000000..55fcf613 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/moddn.go @@ -0,0 +1,86 @@ +// Package ldap - moddn.go contains ModifyDN functionality +// +// https://tools.ietf.org/html/rfc4511 +// +// ModifyDNRequest ::= [APPLICATION 12] SEQUENCE { +// entry LDAPDN, +// newrdn RelativeLDAPDN, +// deleteoldrdn BOOLEAN, +// newSuperior [0] LDAPDN OPTIONAL } +package ldap + +import ( + ber "github.com/go-asn1-ber/asn1-ber" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// 
ModifyDNRequest holds the request to modify a DN +type ModifyDNRequest struct { + DN string + NewRDN string + DeleteOldRDN bool + NewSuperior string +} + +// NewModifyDNRequest creates a new request which can be passed to ModifyDN(). +// +// To move an object in the tree, set the "newSup" to the new parent entry DN. Use an +// empty string for just changing the object's RDN. +// +// For moving the object without renaming, the "rdn" must be the first +// RDN of the given DN. +// +// A call like +// +// mdnReq := NewModifyDNRequest("uid=someone,dc=example,dc=org", "uid=newname", true, "") +// +// will setup the request to just rename uid=someone,dc=example,dc=org to +// uid=newname,dc=example,dc=org. +func NewModifyDNRequest(dn string, rdn string, delOld bool, newSup string) *ModifyDNRequest { + return &ModifyDNRequest{ + DN: dn, + NewRDN: rdn, + DeleteOldRDN: delOld, + NewSuperior: newSup, + } +} + +func (req *ModifyDNRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationModifyDNRequest, nil, "Modify DN Request") + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.DN, "DN")) + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.NewRDN, "New RDN")) + pkt.AppendChild(ber.NewBoolean(ber.ClassUniversal, ber.TypePrimitive, ber.TagBoolean, req.DeleteOldRDN, "Delete old RDN")) + if req.NewSuperior != "" { + pkt.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 0, req.NewSuperior, "New Superior")) + } + + envelope.AppendChild(pkt) + + return nil +} + +// ModifyDN renames the given DN and optionally move to another base (when the "newSup" argument +// to NewModifyDNRequest() is not ""). 
+func (l *Conn) ModifyDN(m *ModifyDNRequest) error { + msgCtx, err := l.doRequest(m) + if err != nil { + return err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return err + } + + tag := packet.Children[1].Tag + if tag == ApplicationModifyDNResponse { + err := GetLDAPError(packet) + if err != nil { + return err + } + } else { + l.Debug.Log("Unexpected Response tag", mlog.Uint("tag", tag)) + } + return nil +} diff --git a/vendor/github.com/mattermost/ldap/modify.go b/vendor/github.com/mattermost/ldap/modify.go new file mode 100644 index 00000000..6798ef91 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/modify.go @@ -0,0 +1,151 @@ +// File contains Modify functionality +// +// https://tools.ietf.org/html/rfc4511 +// +// ModifyRequest ::= [APPLICATION 6] SEQUENCE { +// object LDAPDN, +// changes SEQUENCE OF change SEQUENCE { +// operation ENUMERATED { +// add (0), +// delete (1), +// replace (2), +// ... }, +// modification PartialAttribute } } +// +// PartialAttribute ::= SEQUENCE { +// type AttributeDescription, +// vals SET OF value AttributeValue } +// +// AttributeDescription ::= LDAPString +// -- Constrained to +// -- [RFC4512] +// +// AttributeValue ::= OCTET STRING +// + +package ldap + +import ( + ber "github.com/go-asn1-ber/asn1-ber" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// Change operation choices +const ( + AddAttribute = 0 + DeleteAttribute = 1 + ReplaceAttribute = 2 +) + +// PartialAttribute for a ModifyRequest as defined in https://tools.ietf.org/html/rfc4511 +type PartialAttribute struct { + // Type is the type of the partial attribute + Type string + // Vals are the values of the partial attribute + Vals []string +} + +func (p *PartialAttribute) encode() *ber.Packet { + seq := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "PartialAttribute") + seq.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, 
p.Type, "Type")) + set := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSet, nil, "AttributeValue") + for _, value := range p.Vals { + set.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, value, "Vals")) + } + seq.AppendChild(set) + return seq +} + +// Change for a ModifyRequest as defined in https://tools.ietf.org/html/rfc4511 +type Change struct { + // Operation is the type of change to be made + Operation uint + // Modification is the attribute to be modified + Modification PartialAttribute +} + +func (c *Change) encode() *ber.Packet { + change := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Change") + change.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagEnumerated, uint64(c.Operation), "Operation")) + change.AppendChild(c.Modification.encode()) + return change +} + +// ModifyRequest as defined in https://tools.ietf.org/html/rfc4511 +type ModifyRequest struct { + // DN is the distinguishedName of the directory entry to modify + DN string + // Changes contain the attributes to modify + Changes []Change + // Controls hold optional controls to send with the request + Controls []Control +} + +// Add appends the given attribute to the list of changes to be made +func (req *ModifyRequest) Add(attrType string, attrVals []string) { + req.appendChange(AddAttribute, attrType, attrVals) +} + +// Delete appends the given attribute to the list of changes to be made +func (req *ModifyRequest) Delete(attrType string, attrVals []string) { + req.appendChange(DeleteAttribute, attrType, attrVals) +} + +// Replace appends the given attribute to the list of changes to be made +func (req *ModifyRequest) Replace(attrType string, attrVals []string) { + req.appendChange(ReplaceAttribute, attrType, attrVals) +} + +func (req *ModifyRequest) appendChange(operation uint, attrType string, attrVals []string) { + req.Changes = append(req.Changes, Change{operation, PartialAttribute{Type: 
attrType, Vals: attrVals}}) +} + +func (req *ModifyRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationModifyRequest, nil, "Modify Request") + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.DN, "DN")) + changes := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Changes") + for _, change := range req.Changes { + changes.AppendChild(change.encode()) + } + pkt.AppendChild(changes) + + envelope.AppendChild(pkt) + if len(req.Controls) > 0 { + envelope.AppendChild(encodeControls(req.Controls)) + } + + return nil +} + +// NewModifyRequest creates a modify request for the given DN +func NewModifyRequest(dn string, controls []Control) *ModifyRequest { + return &ModifyRequest{ + DN: dn, + Controls: controls, + } +} + +// Modify performs the ModifyRequest +func (l *Conn) Modify(modifyRequest *ModifyRequest) error { + msgCtx, err := l.doRequest(modifyRequest) + if err != nil { + return err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return err + } + + tag := packet.Children[1].Tag + if tag == ApplicationModifyResponse { + err := GetLDAPError(packet) + if err != nil { + return err + } + } else { + l.Debug.Log("Unexpected Response tag", mlog.Uint("tag", tag)) + } + return nil +} diff --git a/vendor/github.com/mattermost/ldap/passwdmodify.go b/vendor/github.com/mattermost/ldap/passwdmodify.go new file mode 100644 index 00000000..135554d9 --- /dev/null +++ b/vendor/github.com/mattermost/ldap/passwdmodify.go @@ -0,0 +1,131 @@ +// This file contains the password modify extended operation as specified in rfc 3062 +// +// https://tools.ietf.org/html/rfc3062 +// + +package ldap + +import ( + "fmt" + + ber "github.com/go-asn1-ber/asn1-ber" +) + +const ( + passwordModifyOID = "1.3.6.1.4.1.4203.1.11.1" +) + +// PasswordModifyRequest implements the Password Modify Extended Operation as defined in 
https://www.ietf.org/rfc/rfc3062.txt +type PasswordModifyRequest struct { + // UserIdentity is an optional string representation of the user associated with the request. + // This string may or may not be an LDAPDN [RFC2253]. + // If no UserIdentity field is present, the request acts up upon the password of the user currently associated with the LDAP session + UserIdentity string + // OldPassword, if present, contains the user's current password + OldPassword string + // NewPassword, if present, contains the desired password for this user + NewPassword string +} + +// PasswordModifyResult holds the server response to a PasswordModifyRequest +type PasswordModifyResult struct { + // GeneratedPassword holds a password generated by the server, if present + GeneratedPassword string + // Referral are the returned referral + Referral string +} + +func (req *PasswordModifyRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationExtendedRequest, nil, "Password Modify Extended Operation") + pkt.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 0, passwordModifyOID, "Extended Request Name: Password Modify OID")) + + extendedRequestValue := ber.Encode(ber.ClassContext, ber.TypePrimitive, 1, nil, "Extended Request Value: Password Modify Request") + passwordModifyRequestValue := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Password Modify Request") + if req.UserIdentity != "" { + passwordModifyRequestValue.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 0, req.UserIdentity, "User Identity")) + } + if req.OldPassword != "" { + passwordModifyRequestValue.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 1, req.OldPassword, "Old Password")) + } + if req.NewPassword != "" { + passwordModifyRequestValue.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 2, req.NewPassword, "New Password")) + } + 
extendedRequestValue.AppendChild(passwordModifyRequestValue) + + pkt.AppendChild(extendedRequestValue) + + envelope.AppendChild(pkt) + + return nil +} + +// NewPasswordModifyRequest creates a new PasswordModifyRequest +// +// According to the RFC 3602: +// userIdentity is a string representing the user associated with the request. +// This string may or may not be an LDAPDN (RFC 2253). +// If userIdentity is empty then the operation will act on the user associated +// with the session. +// +// oldPassword is the current user's password, it can be empty or it can be +// needed depending on the session user access rights (usually an administrator +// can change a user's password without knowing the current one) and the +// password policy (see pwdSafeModify password policy's attribute) +// +// newPassword is the desired user's password. If empty the server can return +// an error or generate a new password that will be available in the +// PasswordModifyResult.GeneratedPassword +// +func NewPasswordModifyRequest(userIdentity string, oldPassword string, newPassword string) *PasswordModifyRequest { + return &PasswordModifyRequest{ + UserIdentity: userIdentity, + OldPassword: oldPassword, + NewPassword: newPassword, + } +} + +// PasswordModify performs the modification request +func (l *Conn) PasswordModify(passwordModifyRequest *PasswordModifyRequest) (*PasswordModifyResult, error) { + msgCtx, err := l.doRequest(passwordModifyRequest) + if err != nil { + return nil, err + } + defer l.finishMessage(msgCtx) + + packet, err := l.readPacket(msgCtx) + if err != nil { + return nil, err + } + + result := &PasswordModifyResult{} + + if packet.Children[1].Tag == ApplicationExtendedResponse { + err := GetLDAPError(packet) + if err != nil { + if IsErrorWithCode(err, LDAPResultReferral) { + for _, child := range packet.Children[1].Children { + if child.Tag == 3 { + result.Referral = child.Children[0].Value.(string) + } + } + } + return result, err + } + } else { + return nil, 
NewError(ErrorUnexpectedResponse, fmt.Errorf("unexpected Response: %d", packet.Children[1].Tag)) + } + + extendedResponse := packet.Children[1] + for _, child := range extendedResponse.Children { + if child.Tag == 11 { + passwordModifyResponseValue := ber.DecodePacket(child.Data.Bytes()) + if len(passwordModifyResponseValue.Children) == 1 { + if passwordModifyResponseValue.Children[0].Tag == 0 { + result.GeneratedPassword = ber.DecodeString(passwordModifyResponseValue.Children[0].Data.Bytes()) + } + } + } + } + + return result, nil +} diff --git a/vendor/github.com/mattermost/ldap/request.go b/vendor/github.com/mattermost/ldap/request.go new file mode 100644 index 00000000..fd5f20bf --- /dev/null +++ b/vendor/github.com/mattermost/ldap/request.go @@ -0,0 +1,66 @@ +package ldap + +import ( + "errors" + + ber "github.com/go-asn1-ber/asn1-ber" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +var ( + errRespChanClosed = errors.New("ldap: response channel closed") + errCouldNotRetMsg = errors.New("ldap: could not retrieve message") +) + +type request interface { + appendTo(*ber.Packet) error +} + +type requestFunc func(*ber.Packet) error + +func (f requestFunc) appendTo(p *ber.Packet) error { + return f(p) +} + +func (l *Conn) doRequest(req request) (*messageContext, error) { + packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "LDAP Request") + packet.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, l.nextMessageID(), "MessageID")) + if err := req.appendTo(packet); err != nil { + return nil, err + } + + l.Debug.Log("Sending package", PacketToField(packet)) + + msgCtx, err := l.sendMessage(packet) + if err != nil { + return nil, err + } + + l.Debug.Log("Send package", mlog.Int("id", msgCtx.id)) + return msgCtx, nil +} + +func (l *Conn) readPacket(msgCtx *messageContext) (*ber.Packet, error) { + l.Debug.Log("Waiting for response", mlog.Int("id", msgCtx.id)) + packetResponse, ok := 
<-msgCtx.responses + if !ok { + return nil, NewError(ErrorNetwork, errRespChanClosed) + } + packet, err := packetResponse.ReadPacket() + if l.Debug.Enabled() { + if err := addLDAPDescriptions(packet); err != nil { + return nil, err + } + l.Debug.Log("Got response", mlog.Int("id", msgCtx.id), PacketToField(packet), mlog.Err(err)) + } + + if err != nil { + return nil, err + } + + if packet == nil { + return nil, NewError(ErrorNetwork, errCouldNotRetMsg) + } + + return packet, nil +} diff --git a/vendor/github.com/mattermost/ldap/search.go b/vendor/github.com/mattermost/ldap/search.go new file mode 100644 index 00000000..3849212c --- /dev/null +++ b/vendor/github.com/mattermost/ldap/search.go @@ -0,0 +1,421 @@ +// File contains Search functionality +// +// https://tools.ietf.org/html/rfc4511 +// +// SearchRequest ::= [APPLICATION 3] SEQUENCE { +// baseObject LDAPDN, +// scope ENUMERATED { +// baseObject (0), +// singleLevel (1), +// wholeSubtree (2), +// ... }, +// derefAliases ENUMERATED { +// neverDerefAliases (0), +// derefInSearching (1), +// derefFindingBaseObj (2), +// derefAlways (3) }, +// sizeLimit INTEGER (0 .. maxInt), +// timeLimit INTEGER (0 .. maxInt), +// typesOnly BOOLEAN, +// filter Filter, +// attributes AttributeSelection } +// +// AttributeSelection ::= SEQUENCE OF selector LDAPString +// -- The LDAPString is constrained to +// -- in Section 4.5.1.8 +// +// Filter ::= CHOICE { +// and [0] SET SIZE (1..MAX) OF filter Filter, +// or [1] SET SIZE (1..MAX) OF filter Filter, +// not [2] Filter, +// equalityMatch [3] AttributeValueAssertion, +// substrings [4] SubstringFilter, +// greaterOrEqual [5] AttributeValueAssertion, +// lessOrEqual [6] AttributeValueAssertion, +// present [7] AttributeDescription, +// approxMatch [8] AttributeValueAssertion, +// extensibleMatch [9] MatchingRuleAssertion, +// ... 
} +// +// SubstringFilter ::= SEQUENCE { +// type AttributeDescription, +// substrings SEQUENCE SIZE (1..MAX) OF substring CHOICE { +// initial [0] AssertionValue, -- can occur at most once +// any [1] AssertionValue, +// final [2] AssertionValue } -- can occur at most once +// } +// +// MatchingRuleAssertion ::= SEQUENCE { +// matchingRule [1] MatchingRuleId OPTIONAL, +// type [2] AttributeDescription OPTIONAL, +// matchValue [3] AssertionValue, +// dnAttributes [4] BOOLEAN DEFAULT FALSE } +// +// + +package ldap + +import ( + "errors" + "fmt" + "sort" + "strings" + + ber "github.com/go-asn1-ber/asn1-ber" +) + +// scope choices +const ( + ScopeBaseObject = 0 + ScopeSingleLevel = 1 + ScopeWholeSubtree = 2 +) + +// ScopeMap contains human readable descriptions of scope choices +var ScopeMap = map[int]string{ + ScopeBaseObject: "Base Object", + ScopeSingleLevel: "Single Level", + ScopeWholeSubtree: "Whole Subtree", +} + +// derefAliases +const ( + NeverDerefAliases = 0 + DerefInSearching = 1 + DerefFindingBaseObj = 2 + DerefAlways = 3 +) + +// DerefMap contains human readable descriptions of derefAliases choices +var DerefMap = map[int]string{ + NeverDerefAliases: "NeverDerefAliases", + DerefInSearching: "DerefInSearching", + DerefFindingBaseObj: "DerefFindingBaseObj", + DerefAlways: "DerefAlways", +} + +// NewEntry returns an Entry object with the specified distinguished name and attribute key-value pairs. 
+// The map of attributes is accessed in alphabetical order of the keys in order to ensure that, for the +// same input map of attributes, the output entry will contain the same order of attributes +func NewEntry(dn string, attributes map[string][]string) *Entry { + var attributeNames []string + for attributeName := range attributes { + attributeNames = append(attributeNames, attributeName) + } + sort.Strings(attributeNames) + + var encodedAttributes []*EntryAttribute + for _, attributeName := range attributeNames { + encodedAttributes = append(encodedAttributes, NewEntryAttribute(attributeName, attributes[attributeName])) + } + return &Entry{ + DN: dn, + Attributes: encodedAttributes, + } +} + +// Entry represents a single search result entry +type Entry struct { + // DN is the distinguished name of the entry + DN string + // Attributes are the returned attributes for the entry + Attributes []*EntryAttribute +} + +// GetAttributeValues returns the values for the named attribute, or an empty list +func (e *Entry) GetAttributeValues(attribute string) []string { + for _, attr := range e.Attributes { + if attr.Name == attribute { + return attr.Values + } + } + return []string{} +} + +// GetRawAttributeValues returns the byte values for the named attribute, or an empty list +func (e *Entry) GetRawAttributeValues(attribute string) [][]byte { + for _, attr := range e.Attributes { + if attr.Name == attribute { + return attr.ByteValues + } + } + return [][]byte{} +} + +// GetAttributeValue returns the first value for the named attribute, or "" +func (e *Entry) GetAttributeValue(attribute string) string { + values := e.GetAttributeValues(attribute) + if len(values) == 0 { + return "" + } + return values[0] +} + +// GetRawAttributeValue returns the first value for the named attribute, or an empty slice +func (e *Entry) GetRawAttributeValue(attribute string) []byte { + values := e.GetRawAttributeValues(attribute) + if len(values) == 0 { + return []byte{} + } + return 
values[0] +} + +// Print outputs a human-readable description +func (e *Entry) Print() { + fmt.Printf("DN: %s\n", e.DN) + for _, attr := range e.Attributes { + attr.Print() + } +} + +// PrettyPrint outputs a human-readable description indenting +func (e *Entry) PrettyPrint(indent int) { + fmt.Printf("%sDN: %s\n", strings.Repeat(" ", indent), e.DN) + for _, attr := range e.Attributes { + attr.PrettyPrint(indent + 2) + } +} + +// NewEntryAttribute returns a new EntryAttribute with the desired key-value pair +func NewEntryAttribute(name string, values []string) *EntryAttribute { + var bytes [][]byte + for _, value := range values { + bytes = append(bytes, []byte(value)) + } + return &EntryAttribute{ + Name: name, + Values: values, + ByteValues: bytes, + } +} + +// EntryAttribute holds a single attribute +type EntryAttribute struct { + // Name is the name of the attribute + Name string + // Values contain the string values of the attribute + Values []string + // ByteValues contain the raw values of the attribute + ByteValues [][]byte +} + +// Print outputs a human-readable description +func (e *EntryAttribute) Print() { + fmt.Printf("%s: %s\n", e.Name, e.Values) +} + +// PrettyPrint outputs a human-readable description with indenting +func (e *EntryAttribute) PrettyPrint(indent int) { + fmt.Printf("%s%s: %s\n", strings.Repeat(" ", indent), e.Name, e.Values) +} + +// SearchResult holds the server's response to a search request +type SearchResult struct { + // Entries are the returned entries + Entries []*Entry + // Referrals are the returned referrals + Referrals []string + // Controls are the returned controls + Controls []Control +} + +// Print outputs a human-readable description +func (s *SearchResult) Print() { + for _, entry := range s.Entries { + entry.Print() + } +} + +// PrettyPrint outputs a human-readable description with indenting +func (s *SearchResult) PrettyPrint(indent int) { + for _, entry := range s.Entries { + entry.PrettyPrint(indent) + } +} + +// 
SearchRequest represents a search request to send to the server +type SearchRequest struct { + BaseDN string + Scope int + DerefAliases int + SizeLimit int + TimeLimit int + TypesOnly bool + Filter string + Attributes []string + Controls []Control +} + +func (req *SearchRequest) appendTo(envelope *ber.Packet) error { + pkt := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationSearchRequest, nil, "Search Request") + pkt.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, req.BaseDN, "Base DN")) + pkt.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagEnumerated, uint64(req.Scope), "Scope")) + pkt.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagEnumerated, uint64(req.DerefAliases), "Deref Aliases")) + pkt.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, uint64(req.SizeLimit), "Size Limit")) + pkt.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, uint64(req.TimeLimit), "Time Limit")) + pkt.AppendChild(ber.NewBoolean(ber.ClassUniversal, ber.TypePrimitive, ber.TagBoolean, req.TypesOnly, "Types Only")) + // compile and encode filter + filterPacket, err := CompileFilter(req.Filter) + if err != nil { + return err + } + pkt.AppendChild(filterPacket) + // encode attributes + attributesPacket := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Attributes") + for _, attribute := range req.Attributes { + attributesPacket.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, attribute, "Attribute")) + } + pkt.AppendChild(attributesPacket) + + envelope.AppendChild(pkt) + if len(req.Controls) > 0 { + envelope.AppendChild(encodeControls(req.Controls)) + } + + return nil +} + +// NewSearchRequest creates a new search request +func NewSearchRequest( + BaseDN string, + Scope, DerefAliases, SizeLimit, TimeLimit int, + TypesOnly bool, + Filter string, + Attributes []string, 
+ Controls []Control, +) *SearchRequest { + return &SearchRequest{ + BaseDN: BaseDN, + Scope: Scope, + DerefAliases: DerefAliases, + SizeLimit: SizeLimit, + TimeLimit: TimeLimit, + TypesOnly: TypesOnly, + Filter: Filter, + Attributes: Attributes, + Controls: Controls, + } +} + +// SearchWithPaging accepts a search request and desired page size in order to execute LDAP queries to fulfill the +// search request. All paged LDAP query responses will be buffered and the final result will be returned atomically. +// The following four cases are possible given the arguments: +// - given SearchRequest missing a control of type ControlTypePaging: we will add one with the desired paging size +// - given SearchRequest contains a control of type ControlTypePaging that isn't actually a ControlPaging: fail without issuing any queries +// - given SearchRequest contains a control of type ControlTypePaging with pagingSize equal to the size requested: no change to the search request +// - given SearchRequest contains a control of type ControlTypePaging with pagingSize not equal to the size requested: fail without issuing any queries +// +// A requested pagingSize of 0 is interpreted as no limit by LDAP servers. 
+func (l *Conn) SearchWithPaging(searchRequest *SearchRequest, pagingSize uint32) (*SearchResult, error) { + var pagingControl *ControlPaging + + control := FindControl(searchRequest.Controls, ControlTypePaging) + if control == nil { + pagingControl = NewControlPaging(pagingSize) + searchRequest.Controls = append(searchRequest.Controls, pagingControl) + } else { + castControl, ok := control.(*ControlPaging) + if !ok { + return nil, fmt.Errorf("expected paging control to be of type *ControlPaging, got %v", control) + } + if castControl.PagingSize != pagingSize { + return nil, fmt.Errorf("paging size given in search request (%d) conflicts with size given in search call (%d)", castControl.PagingSize, pagingSize) + } + pagingControl = castControl + } + + searchResult := new(SearchResult) + for { + result, err := l.Search(searchRequest) + if err != nil { + return searchResult, err + } + if result == nil { + return searchResult, NewError(ErrorNetwork, errors.New("ldap: packet not received")) + } + + for _, entry := range result.Entries { + searchResult.Entries = append(searchResult.Entries, entry) + } + for _, referral := range result.Referrals { + searchResult.Referrals = append(searchResult.Referrals, referral) + } + for _, control := range result.Controls { + searchResult.Controls = append(searchResult.Controls, control) + } + + pagingResult := FindControl(result.Controls, ControlTypePaging) + if pagingResult == nil { + pagingControl = nil + break + } + + cookie := pagingResult.(*ControlPaging).Cookie + if len(cookie) == 0 { + pagingControl = nil + break + } + pagingControl.SetCookie(cookie) + } + + if pagingControl != nil { + pagingControl.PagingSize = 0 + l.Search(searchRequest) + } + + return searchResult, nil +} + +// Search performs the given search request +func (l *Conn) Search(searchRequest *SearchRequest) (*SearchResult, error) { + msgCtx, err := l.doRequest(searchRequest) + if err != nil { + return nil, err + } + defer l.finishMessage(msgCtx) + + result := 
&SearchResult{ + Entries: make([]*Entry, 0), + Referrals: make([]string, 0), + Controls: make([]Control, 0)} + + for { + packet, err := l.readPacket(msgCtx) + if err != nil { + return nil, err + } + + switch packet.Children[1].Tag { + case 4: + entry := new(Entry) + entry.DN = packet.Children[1].Children[0].Value.(string) + for _, child := range packet.Children[1].Children[1].Children { + attr := new(EntryAttribute) + attr.Name = child.Children[0].Value.(string) + for _, value := range child.Children[1].Children { + attr.Values = append(attr.Values, value.Value.(string)) + attr.ByteValues = append(attr.ByteValues, value.ByteValue) + } + entry.Attributes = append(entry.Attributes, attr) + } + result.Entries = append(result.Entries, entry) + case 5: + err := GetLDAPError(packet) + if err != nil { + return nil, err + } + if len(packet.Children) == 3 { + for _, child := range packet.Children[2].Children { + decodedChild, err := DecodeControl(child) + if err != nil { + return nil, fmt.Errorf("failed to decode child control: %s", err) + } + result.Controls = append(result.Controls, decodedChild) + } + } + return result, nil + case 19: + result.Referrals = append(result.Referrals, packet.Children[1].Children[0].Value.(string)) + } + } +} diff --git a/vendor/github.com/mattermost/logr/v2/.gitignore b/vendor/github.com/mattermost/logr/v2/.gitignore new file mode 100644 index 00000000..31317672 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/.gitignore @@ -0,0 +1,42 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib +debug +dynip + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Output of profiler +*.prof + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +# IntelliJ config +.idea + +# log files +*.log + +# transient directories +vendor +output +build +app +logs + +# test apps +test/cmd/testapp1/testapp1 
+test/cmd/simple/simple +test/cmd/gelf/gelf + +# tools +.aider* +!.aider.conf.yml +!.aiderignore \ No newline at end of file diff --git a/vendor/github.com/mattermost/logr/v2/LICENSE b/vendor/github.com/mattermost/logr/v2/LICENSE new file mode 100644 index 00000000..3bea6788 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 wiggin77 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/mattermost/logr/v2/README.md b/vendor/github.com/mattermost/logr/v2/README.md new file mode 100644 index 00000000..69558e79 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/README.md @@ -0,0 +1,205 @@ +![Logr_Logo](https://user-images.githubusercontent.com/7295363/200433587-ae9df127-9427-4753-a0a0-85723a216e0e.png) + +> A fully asynchronous, contextual logger for Go. 
+ +# logr + +[![GoDoc](https://godoc.org/github.com/mattermost/logr?status.svg)](http://godoc.org/github.com/mattermost/logr) +[![Report Card](https://goreportcard.com/badge/github.com/mattermost/logr)](https://goreportcard.com/report/github.com/mattermost/logr) + +Logr is inspired by [Logrus](https://github.com/sirupsen/logrus) and [Zap](https://github.com/uber-go/zap) but addresses a number of issues: + +1. Logr is fully asynchronous, meaning that all formatting and writing is done in the background. Latency sensitive applications benefit from not waiting for logging to complete. + +2. Logr provides custom filters which provide more flexibility than Trace, Debug, Info... levels. If you need to temporarily increase verbosity of logging while tracking down a problem you can avoid the fire-hose that typically comes from Debug or Trace by using custom filters. + +3. Logr generates much less allocations than Logrus, and is close to Zap in allocations. + +## Concepts + + +| entity | description | +| ------ | ----------- | +| Logr | Engine instance typically instantiated once; used to configure logging.
```lgr,_ := logr.New()```| +| Logger | Provides contextual logging via fields; lightweight, can be created once and accessed globally, or created on demand.
```logger := lgr.NewLogger()```
```logger2 := logger.With(logr.String("user", "Sam"))```| +| Target | A destination for log items such as console, file, database or just about anything that can be written to. Each target has its own filter/level and formatter, and any number of targets can be added to a Logr. Targets for file, syslog and any io.Writer are built-in and it is easy to create your own. You can also use any [Logrus hooks](https://github.com/sirupsen/logrus/wiki/Hooks) via a simple [adapter](https://github.com/wiggin77/logrus4logr).| +| Filter | Determines which logging calls get written versus filtered out. Also determines which logging calls generate a stack trace.
```filter := &logr.StdFilter{Lvl: logr.Warn, Stacktrace: logr.Fatal}```| +| Formatter | Formats the output. Logr includes built-in formatters for JSON and plain text with delimiters. It is easy to create your own formatters or you can also use any [Logrus formatters](https://github.com/sirupsen/logrus#formatters) via a simple [adapter](https://github.com/wiggin77/logrus4logr).
```formatter := &format.Plain{Delim: " \| "}```|
+
+## Usage
+
+```go
+// Create Logr instance.
+lgr,_ := logr.New()
+
+// Create a filter and formatter. Both can be shared by multiple
+// targets.
+filter := &logr.StdFilter{Lvl: logr.Warn, Stacktrace: logr.Error}
+formatter := &formatters.Plain{Delim: " | "}
+
+// WriterTarget outputs to any io.Writer
+t := targets.NewWriterTarget(filter, formatter, os.Stdout, 1000)
+lgr.AddTarget(t)
+
+// One or more Loggers can be created, shared, used concurrently,
+// or created on demand.
+logger := lgr.NewLogger().With(logr.String("user", "Sarah"))
+
+// Now we can log to the target(s).
+logger.Debug("login attempt")
+logger.Error("login failed")
+
+// Ensure targets are drained before application exit.
+lgr.Shutdown()
+```
+
+## Fields
+
+Fields allow for contextual logging, meaning information can be added to log statements without changing the statements themselves. Information can be shared across multiple logging statements thus allowing log analysis tools to group them.
+
+Fields can be added to a Logger via `Logger.With` or included with each log record:
+
+```go
+lgr,_ := logr.New()
+// ... add targets ...
+logger := lgr.NewLogger().With(
+    logr.Any("user", user),
+    logr.String("role", role),
+)
+
+logger.Info("login attempt", logr.Int("attempt_count", count))
+// ... later ...
+logger.Info("login", logr.String("result", result))
+```
+
+Logr fields are inspired by and work the same as [Zap fields](https://pkg.go.dev/go.uber.org/zap#Field).
+
+## Filters
+
+Logr supports the traditional seven log levels via `logr.StdFilter`: Panic, Fatal, Error, Warning, Info, Debug, and Trace.
+
+```go
+// When added to a target, this filter will only allow
+// log statements with level severity Warn or higher.
+// It will also generate stack traces for Error or higher.
+filter := &logr.StdFilter{Lvl: logr.Warn, Stacktrace: logr.Error}
+```
+
+Logr also supports custom filters (logr.CustomFilter) which allow fine grained inclusion of log items without turning on the fire-hose.
+
+```go
+  // create custom levels; use IDs > 10.
+  LoginLevel := logr.Level{ID: 100, Name: "login ", Stacktrace: false}
+  LogoutLevel := logr.Level{ID: 101, Name: "logout", Stacktrace: false}
+
+  lgr,_ := logr.New()
+
+  // create a custom filter with custom levels.
+  filter := &logr.CustomFilter{}
+  filter.Add(LoginLevel, LogoutLevel)
+
+  formatter := &formatters.Plain{Delim: " | "}
+  tgr := targets.NewWriterTarget(filter, formatter, os.Stdout, 1000)
+  lgr.AddTarget(tgr)
+  logger := lgr.NewLogger().With(logr.String("user", "Bob"), logr.String("role", "admin"))
+
+  logger.Log(LoginLevel, "this item will get logged")
+  logger.Debug("won't be logged since Debug wasn't added to custom filter")
+```
+
+Both filter types allow you to determine which levels force a stack trace to be output. Note that generating stack traces cannot happen fully asynchronously and thus add some latency to the calling goroutine.
+
+## Targets
+
+There are built-in targets for outputting to syslog, file, TCP, or any `io.Writer`. More will be added.
+
+You can use any [Logrus hooks](https://github.com/sirupsen/logrus/wiki/Hooks) via a simple [adapter](https://github.com/wiggin77/logrus4logr).
+
+You can create your own target by implementing the simple [Target](./target.go) interface.
+
+Example target that outputs to `io.Writer`:
+
+```go
+type Writer struct {
+	out io.Writer
+}
+
+func NewWriterTarget(out io.Writer) *Writer {
+	w := &Writer{out: out}
+	return w
+}
+
+// Called once to initialize target.
+func (w *Writer) Init() error {
+	return nil
+}
+
+// Write will always be called by a single internal Logr goroutine, so no locking needed.
+func (w *Writer) Write(p []byte, rec *logr.LogRec) (int, error) {
+	return w.out.Write(p)
+}
+
+// Called once to cleanup/free resources for target.
+func (w *Writer) Shutdown() error {
+	return nil
+}
+```
+
+## Formatters
+
+Logr has two built-in formatters, one for JSON and the other plain, delimited text.
+
+You can use any [Logrus formatters](https://github.com/sirupsen/logrus#formatters) via a simple [adapter](https://github.com/wiggin77/logrus4logr).
+
+You can create your own formatter by implementing the [Formatter](./formatter.go) interface:
+
+```go
+Format(rec *LogRec, stacktrace bool, buf *bytes.Buffer) (*bytes.Buffer, error)
+```
+
+## Configuration options
+
+When creating the Logr instance, you can set configuration options. For example:
+
+```go
+lgr, err := logr.New(
+	logr.MaxQueueSize(1000),
+	logr.StackFilter("mypackage1", "mypackage2"),
+)
+```
+
+Some options are documented below. See [options.go](./options.go) for all available configuration options.
+
+### ```Logr.OnLoggerError(err error)```
+
+Called any time an internal logging error occurs. For example, this can happen when a target cannot connect to its data sink.
+
+It may be tempting to log this error, however there is a danger that logging this will simply generate another error and so on. If you must log it, use a target and custom level specifically for this event and ensure it cannot generate more errors.
+
+### ```Logr.OnQueueFull func(rec *LogRec, maxQueueSize int) bool```
+
+Called on an attempt to add a log record to a full Logr queue. This generally means the Logr maximum queue size is too small, or at least one target is very slow. Logr maximum queue size can be changed before adding any targets via:
+
+```go
+lgr, err := logr.New(logr.MaxQueueSize(2000))
+```
+
+Returning true will drop the log record. False will block until the log record can be added, which creates a natural throttle at the expense of latency for the calling goroutine. The default is to block.
+ +### ```Logr.OnTargetQueueFull func(target Target, rec *LogRec, maxQueueSize int) bool``` + +Called on an attempt to add a log record to a full target queue. This generally means your target's max queue size is too small, or the target is very slow to output. + +As with the Logr queue, returning true will drop the log record. False will block until the log record can be added, which creates a natural throttle at the expense of latency for the calling goroutine. The default is to block. + +### ```Logr.OnExit func(code int) and Logr.OnPanic func(err interface{})``` + +OnExit and OnPanic are called when the Logger.FatalXXX and Logger.PanicXXX functions are called respectively. + +In both cases the default behavior is to shut down gracefully, draining all targets, and calling `os.Exit` or `panic` respectively. + +When adding your own handlers, be sure to call `Logr.Shutdown` before exiting the application to avoid losing log records. + +### ```Logr.StackFilter(pkg ...string)``` + +StackFilter sets a list of package names to exclude from the top of stack traces. The `Logr` packages are automatically filtered. diff --git a/vendor/github.com/mattermost/logr/v2/buffer.go b/vendor/github.com/mattermost/logr/v2/buffer.go new file mode 100644 index 00000000..42bf5255 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/buffer.go @@ -0,0 +1,28 @@ +package logr + +import ( + "bytes" + "sync" +) + +// Buffer provides a thread-safe buffer useful for logging to memory in unit tests. 
+type Buffer struct { + buf bytes.Buffer + mux sync.Mutex +} + +func (b *Buffer) Read(p []byte) (n int, err error) { + b.mux.Lock() + defer b.mux.Unlock() + return b.buf.Read(p) +} +func (b *Buffer) Write(p []byte) (n int, err error) { + b.mux.Lock() + defer b.mux.Unlock() + return b.buf.Write(p) +} +func (b *Buffer) String() string { + b.mux.Lock() + defer b.mux.Unlock() + return b.buf.String() +} diff --git a/vendor/github.com/mattermost/logr/v2/config/config.go b/vendor/github.com/mattermost/logr/v2/config/config.go new file mode 100644 index 00000000..b835a986 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/config/config.go @@ -0,0 +1,209 @@ +package config + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "os" + "strings" + + "github.com/mattermost/logr/v2" + "github.com/mattermost/logr/v2/formatters" + "github.com/mattermost/logr/v2/targets" +) + +type TargetCfg struct { + Type string `json:"type"` // one of "console", "file", "tcp", "syslog", "none". + Options json.RawMessage `json:"options,omitempty"` + Format string `json:"format"` // one of "json", "plain", "gelf" + FormatOptions json.RawMessage `json:"format_options,omitempty"` + Levels []logr.Level `json:"levels"` + MaxQueueSize int `json:"maxqueuesize,omitempty"` +} + +type ConsoleOptions struct { + Out string `json:"out"` // one of "stdout", "stderr" +} + +type TargetFactory func(targetType string, options json.RawMessage) (logr.Target, error) +type FormatterFactory func(format string, options json.RawMessage) (logr.Formatter, error) + +type Factories struct { + TargetFactory TargetFactory // can be nil + FormatterFactory FormatterFactory // can be nil +} + +var removeAll = func(ti logr.TargetInfo) bool { return true } + +// ConfigureTargets replaces the current list of log targets with a new one based on a map +// of name->TargetCfg. The map of TargetCfg's would typically be serialized from a JSON +// source or can be programmatically created. 
+// +// An optional set of factories can be provided which will be called to create any target +// types or formatters not built-in. +// +// To append log targets to an existing config, use `(*Logr).AddTarget` or +// `(*Logr).AddTargetFromConfig` instead. +func ConfigureTargets(lgr *logr.Logr, config map[string]TargetCfg, factories *Factories) error { + if err := lgr.RemoveTargets(context.Background(), removeAll); err != nil { + return fmt.Errorf("error removing existing log targets: %w", err) + } + + if factories == nil { + factories = &Factories{nil, nil} + } + + for name, tcfg := range config { + target, err := newTarget(tcfg.Type, tcfg.Options, factories.TargetFactory) + if err != nil { + return fmt.Errorf("error creating log target %s: %w", name, err) + } + + if target == nil { + continue + } + + formatter, err := newFormatter(tcfg.Format, tcfg.FormatOptions, factories.FormatterFactory) + if err != nil { + return fmt.Errorf("error creating formatter for log target %s: %w", name, err) + } + + filter := newFilter(tcfg.Levels) + qSize := tcfg.MaxQueueSize + if qSize == 0 { + qSize = logr.DefaultMaxQueueSize + } + + if err = lgr.AddTarget(target, name, filter, formatter, qSize); err != nil { + return fmt.Errorf("error adding log target %s: %w", name, err) + } + } + return nil +} + +func newFilter(levels []logr.Level) logr.Filter { + filter := &logr.CustomFilter{} + for _, lvl := range levels { + filter.Add(lvl) + } + return filter +} + +func newTarget(targetType string, options json.RawMessage, factory TargetFactory) (logr.Target, error) { + switch strings.ToLower(targetType) { + case "console": + c := ConsoleOptions{} + if len(options) != 0 { + if err := json.Unmarshal(options, &c); err != nil { + return nil, fmt.Errorf("error decoding console target options: %w", err) + } + } + var w io.Writer + switch c.Out { + case "stderr": + w = os.Stderr + case "stdout", "": + w = os.Stdout + default: + return nil, fmt.Errorf("invalid console target option '%s'", c.Out) + } 
+ return targets.NewWriterTarget(w), nil + case "file": + fo := targets.FileOptions{} + if len(options) == 0 { + return nil, errors.New("missing file target options") + } + if err := json.Unmarshal(options, &fo); err != nil { + return nil, fmt.Errorf("error decoding file target options: %w", err) + } + if err := fo.CheckValid(); err != nil { + return nil, fmt.Errorf("invalid file target options: %w", err) + } + return targets.NewFileTarget(fo), nil + case "tcp": + to := targets.TcpOptions{} + if len(options) == 0 { + return nil, errors.New("missing TCP target options") + } + if err := json.Unmarshal(options, &to); err != nil { + return nil, fmt.Errorf("error decoding TCP target options: %w", err) + } + if err := to.CheckValid(); err != nil { + return nil, fmt.Errorf("invalid TCP target options: %w", err) + } + return targets.NewTcpTarget(&to), nil + case "syslog": + so := targets.SyslogOptions{} + if len(options) == 0 { + return nil, errors.New("missing SysLog target options") + } + if err := json.Unmarshal(options, &so); err != nil { + return nil, fmt.Errorf("error decoding Syslog target options: %w", err) + } + if err := so.CheckValid(); err != nil { + return nil, fmt.Errorf("invalid SysLog target options: %w", err) + } + return targets.NewSyslogTarget(&so) + case "none": + return nil, nil + default: + if factory != nil { + t, err := factory(targetType, options) + if err != nil || t == nil { + return nil, fmt.Errorf("error from target factory: %w", err) + } + return t, nil + } + } + return nil, fmt.Errorf("target type '%s' is unrecognized", targetType) +} + +func newFormatter(format string, options json.RawMessage, factory FormatterFactory) (logr.Formatter, error) { + switch strings.ToLower(format) { + case "json": + j := formatters.JSON{} + if len(options) != 0 { + if err := json.Unmarshal(options, &j); err != nil { + return nil, fmt.Errorf("error decoding JSON formatter options: %w", err) + } + if err := j.CheckValid(); err != nil { + return nil, 
fmt.Errorf("invalid JSON formatter options: %w", err) + } + } + return &j, nil + case "plain": + p := formatters.Plain{} + if len(options) != 0 { + if err := json.Unmarshal(options, &p); err != nil { + return nil, fmt.Errorf("error decoding Plain formatter options: %w", err) + } + if err := p.CheckValid(); err != nil { + return nil, fmt.Errorf("invalid plain formatter options: %w", err) + } + } + return &p, nil + case "gelf": + g := formatters.Gelf{} + if len(options) != 0 { + if err := json.Unmarshal(options, &g); err != nil { + return nil, fmt.Errorf("error decoding Gelf formatter options: %w", err) + } + if err := g.CheckValid(); err != nil { + return nil, fmt.Errorf("invalid GELF formatter options: %w", err) + } + } + return &g, nil + + default: + if factory != nil { + f, err := factory(format, options) + if err != nil || f == nil { + return nil, fmt.Errorf("error from formatter factory: %w", err) + } + return f, nil + } + } + return nil, fmt.Errorf("format '%s' is unrecognized", format) +} diff --git a/vendor/github.com/mattermost/logr/v2/config/sample-config.json b/vendor/github.com/mattermost/logr/v2/config/sample-config.json new file mode 100644 index 00000000..540bafbb --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/config/sample-config.json @@ -0,0 +1,90 @@ +{ + "sample-console": { + "type": "console", + "options": { + "out": "stdout" + }, + "format": "plain", + "format_options": { + "delim": " | " + }, + "levels": [ + {"id": 5, "name": "debug"}, + {"id": 4, "name": "info"}, + {"id": 3, "name": "warn"}, + {"id": 2, "name": "error", "stacktrace": true}, + {"id": 1, "name": "fatal", "stacktrace": true}, + {"id": 0, "name": "panic", "stacktrace": true} + ], + "maxqueuesize": 1000 + }, + "sample-file": { + "type": "file", + "options": { + "filename": "test.log", + "max_size": 1000000, + "max_age": 1, + "max_backups": 10, + "compress": true + }, + "format": "json", + "format_options": { + }, + "levels": [ + {"id": 5, "name": "debug"}, + {"id": 4, 
"name": "info"}, + {"id": 3, "name": "warn"}, + {"id": 2, "name": "error", "stacktrace": true}, + {"id": 1, "name": "fatal", "stacktrace": true}, + {"id": 0, "name": "panic", "stacktrace": true} + ], + "maxqueuesize": 1000 + }, + "sample-tcp": { + "type": "tcp", + "options": { + "host": "localhost", + "port": 18066, + "tls": false, + "cert": "", + "insecure": false + }, + "format": "gelf", + "format_options": { + "hostname": "server01" + }, + "levels": [ + {"id": 5, "name": "debug"}, + {"id": 4, "name": "info"}, + {"id": 3, "name": "warn"}, + {"id": 2, "name": "error", "stacktrace": true}, + {"id": 1, "name": "fatal", "stacktrace": true}, + {"id": 0, "name": "panic", "stacktrace": true} + ], + "maxqueuesize": 1000 + }, + "sample-syslog": { + "type": "syslog", + "options": { + "host": "localhost", + "port": 18066, + "tls": false, + "cert": "", + "insecure": false, + "tag": "testapp" + }, + "format": "plain", + "format_options": { + "delim": " " + }, + "levels": [ + {"id": 5, "name": "debug"}, + {"id": 4, "name": "info"}, + {"id": 3, "name": "warn"}, + {"id": 2, "name": "error", "stacktrace": true}, + {"id": 1, "name": "fatal", "stacktrace": true}, + {"id": 0, "name": "panic", "stacktrace": true} + ], + "maxqueuesize": 1000 + } +} diff --git a/vendor/github.com/mattermost/logr/v2/const.go b/vendor/github.com/mattermost/logr/v2/const.go new file mode 100644 index 00000000..ab151bde --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/const.go @@ -0,0 +1,37 @@ +package logr + +import "time" + +// Defaults. +const ( + // DefaultMaxQueueSize is the default maximum queue size for Logr instances. + DefaultMaxQueueSize = 1000 + + // DefaultMaxStackFrames is the default maximum max number of stack frames collected + // when generating stack traces for logging. + DefaultMaxStackFrames = 30 + + // MaxLevelID is the maximum value of a level ID. Some level cache implementations will + // allocate a cache of this size. Cannot exceed uint. 
+ MaxLevelID = 65535 + + // DefaultEnqueueTimeout is the default amount of time a log record can take to be queued. + // This only applies to blocking enqueue which happen after `logr.OnQueueFull` is called + // and returns false. + DefaultEnqueueTimeout = time.Second * 30 + + // DefaultShutdownTimeout is the default amount of time `logr.Shutdown` can execute before + // timing out. + DefaultShutdownTimeout = time.Second * 30 + + // DefaultFlushTimeout is the default amount of time `logr.Flush` can execute before + // timing out. + DefaultFlushTimeout = time.Second * 30 + + // DefaultMaxPooledBuffer is the maximum size a pooled buffer can be. + // Buffers that grow beyond this size are garbage collected. + DefaultMaxPooledBuffer = 1024 * 1024 + + // DefaultMaxFieldLength is the maximum size of a String or fmt.Stringer field can be. + DefaultMaxFieldLength = -1 +) diff --git a/vendor/github.com/mattermost/logr/v2/field.go b/vendor/github.com/mattermost/logr/v2/field.go new file mode 100644 index 00000000..eedd8e84 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/field.go @@ -0,0 +1,415 @@ +package logr + +import ( + "errors" + "fmt" + "io" + "reflect" + "strconv" + "time" +) + +var ( + Comma = []byte{','} + Equals = []byte{'='} + Space = []byte{' '} + Newline = []byte{'\n'} + Quote = []byte{'"'} + Colon = []byte{':'} +) + +// LogCloner is implemented by `Any` types that require a clone to be provided +// to the logger because the original may mutate. +type LogCloner interface { + LogClone() interface{} +} + +// LogWriter is implemented by `Any` types that provide custom formatting for +// log output. A string representation of the type should be written directly to +// the `io.Writer`. 
+type LogWriter interface { + LogWrite(w io.Writer) error +} + +type FieldType uint8 + +const ( + UnknownType FieldType = iota + StringType + StringerType + StructType + ErrorType + BoolType + TimestampMillisType + TimeType + DurationType + Int64Type + Int32Type + IntType + Uint64Type + Uint32Type + UintType + Float64Type + Float32Type + BinaryType + ArrayType + MapType +) + +type Field struct { + Key string + Type FieldType + Integer int64 + Float float64 + String string + Interface interface{} +} + +func quoteString(w io.Writer, s string, shouldQuote func(s string) bool) error { + b := shouldQuote(s) + if b { + if _, err := w.Write(Quote); err != nil { + return err + } + } + + if _, err := w.Write([]byte(s)); err != nil { + return err + } + + if b { + if _, err := w.Write(Quote); err != nil { + return err + } + } + return nil +} + +// ValueString converts a known type to a string using default formatting. +// This is called lazily by a formatter. +// Formatters can provide custom formatting or types passed via `Any` can implement +// the `LogString` interface to generate output for logging. +// If the optional shouldQuote callback is provided, then it will be called for any +// string output that could potentially need to be quoted. +func (f Field) ValueString(w io.Writer, shouldQuote func(s string) bool) error { + if shouldQuote == nil { + shouldQuote = func(s string) bool { return false } + } + var err error + switch f.Type { + case StringType: + err = quoteString(w, f.String, shouldQuote) + + case StringerType: + s, ok := f.Interface.(fmt.Stringer) + if ok { + err = quoteString(w, s.String(), shouldQuote) + } else if f.Interface == nil { + err = quoteString(w, "", shouldQuote) + } else { + err = fmt.Errorf("invalid fmt.Stringer for key %s", f.Key) + } + + case StructType: + s, ok := f.Interface.(LogWriter) + if ok { + err = s.LogWrite(w) + break + } + // structs that do not implement LogWriter fall back to reflection via Printf. 
+ // TODO: create custom reflection-based encoder. + _, err = fmt.Fprintf(w, "%v", f.Interface) + + case ErrorType: + // TODO: create custom error encoder. + err = quoteString(w, fmt.Sprintf("%v", f.Interface), shouldQuote) + + case BoolType: + var b bool + if f.Integer != 0 { + b = true + } + _, err = io.WriteString(w, strconv.FormatBool(b)) + + case TimestampMillisType: + ts := time.Unix(f.Integer/1000, (f.Integer%1000)*int64(time.Millisecond)) + err = quoteString(w, ts.UTC().Format(TimestampMillisFormat), shouldQuote) + + case TimeType: + t, ok := f.Interface.(time.Time) + if !ok { + err = errors.New("invalid time") + break + } + err = quoteString(w, t.Format(DefTimestampFormat), shouldQuote) + + case DurationType: + _, err = fmt.Fprintf(w, "%s", time.Duration(f.Integer)) + + case Int64Type, Int32Type, IntType: + _, err = io.WriteString(w, strconv.FormatInt(f.Integer, 10)) + + case Uint64Type, Uint32Type, UintType: + _, err = io.WriteString(w, strconv.FormatUint(uint64(f.Integer), 10)) + + case Float64Type, Float32Type: + size := 64 + if f.Type == Float32Type { + size = 32 + } + err = quoteString(w, strconv.FormatFloat(f.Float, 'f', -1, size), shouldQuote) + + case BinaryType: + b, ok := f.Interface.([]byte) + if ok { + _, err = fmt.Fprintf(w, "[%X]", b) + break + } + _, err = fmt.Fprintf(w, "[%v]", f.Interface) + + case ArrayType: + a := reflect.ValueOf(f.Interface) + arr: + for i := 0; i < a.Len(); i++ { + item := a.Index(i) + switch v := item.Interface().(type) { + case LogWriter: + if err = v.LogWrite(w); err != nil { + break arr + } + case fmt.Stringer: + if err = quoteString(w, v.String(), shouldQuote); err != nil { + break arr + } + default: + s := fmt.Sprintf("%v", v) + if err = quoteString(w, s, shouldQuote); err != nil { + break arr + } + } + if i != a.Len()-1 { + if _, err = w.Write(Comma); err != nil { + break arr + } + } + } + + case MapType: + a := reflect.ValueOf(f.Interface) + iter := a.MapRange() + // Already advance to first element + if 
!iter.Next() { + return nil + } + it: + for { + if _, err = io.WriteString(w, iter.Key().String()); err != nil { + break it + } + if _, err = w.Write(Equals); err != nil { + break it + } + val := iter.Value().Interface() + switch v := val.(type) { + case LogWriter: + if err = v.LogWrite(w); err != nil { + break it + } + case fmt.Stringer: + if err = quoteString(w, v.String(), shouldQuote); err != nil { + break it + } + default: + s := fmt.Sprintf("%v", v) + if err = quoteString(w, s, shouldQuote); err != nil { + break it + } + } + + if !iter.Next() { + break it + } + + if _, err = w.Write(Comma); err != nil { + break it + } + + } + + case UnknownType: + _, err = fmt.Fprintf(w, "%v", f.Interface) + + default: + err = fmt.Errorf("invalid type %d", f.Type) + } + return err +} + +func nilField(key string) Field { + return String(key, "") +} + +func fieldForAny(key string, val interface{}) Field { + switch v := val.(type) { + case LogCloner: + if v == nil { + return nilField(key) + } + c := v.LogClone() + return Field{Key: key, Type: StructType, Interface: c} + case *LogCloner: + if v == nil { + return nilField(key) + } + c := (*v).LogClone() + return Field{Key: key, Type: StructType, Interface: c} + case LogWriter: + if v == nil { + return nilField(key) + } + return Field{Key: key, Type: StructType, Interface: v} + case *LogWriter: + if v == nil { + return nilField(key) + } + return Field{Key: key, Type: StructType, Interface: *v} + case bool: + return Bool(key, v) + case *bool: + if v == nil { + return nilField(key) + } + return Bool(key, *v) + case float64: + return Float(key, v) + case *float64: + if v == nil { + return nilField(key) + } + return Float(key, *v) + case float32: + return Float(key, v) + case *float32: + if v == nil { + return nilField(key) + } + return Float(key, *v) + case int: + return Int(key, v) + case *int: + if v == nil { + return nilField(key) + } + return Int(key, *v) + case int64: + return Int(key, v) + case *int64: + if v == nil { + return 
nilField(key) + } + return Int(key, *v) + case int32: + return Int(key, v) + case *int32: + if v == nil { + return nilField(key) + } + return Int(key, *v) + case int16: + return Int(key, int32(v)) + case *int16: + if v == nil { + return nilField(key) + } + return Int(key, int32(*v)) + case int8: + return Int(key, int32(v)) + case *int8: + if v == nil { + return nilField(key) + } + return Int(key, int32(*v)) + case string: + return String(key, v) + case *string: + if v == nil { + return nilField(key) + } + return String(key, *v) + case uint: + return Uint(key, v) + case *uint: + if v == nil { + return nilField(key) + } + return Uint(key, *v) + case uint64: + return Uint(key, v) + case *uint64: + if v == nil { + return nilField(key) + } + return Uint(key, *v) + case uint32: + return Uint(key, v) + case *uint32: + if v == nil { + return nilField(key) + } + return Uint(key, *v) + case uint16: + return Uint(key, uint32(v)) + case *uint16: + if v == nil { + return nilField(key) + } + return Uint(key, uint32(*v)) + case uint8: + return Uint(key, uint32(v)) + case *uint8: + if v == nil { + return nilField(key) + } + return Uint(key, uint32(*v)) + case []byte: + if v == nil { + return nilField(key) + } + return Field{Key: key, Type: BinaryType, Interface: v} + case time.Time: + return Time(key, v) + case *time.Time: + if v == nil { + return nilField(key) + } + return Time(key, *v) + case time.Duration: + return Duration(key, v) + case *time.Duration: + if v == nil { + return nilField(key) + } + return Duration(key, *v) + case error: + return NamedErr(key, v) + case fmt.Stringer: + if v == nil { + return nilField(key) + } + return Field{Key: key, Type: StringerType, Interface: v} + case *fmt.Stringer: + if v == nil { + return nilField(key) + } + return Field{Key: key, Type: StringerType, Interface: *v} + default: + return Field{Key: key, Type: UnknownType, Interface: val} + } +} + +// FieldSorter provides sorting of an array of fields by key. 
+type FieldSorter []Field + +func (fs FieldSorter) Len() int { return len(fs) } +func (fs FieldSorter) Less(i, j int) bool { return fs[i].Key < fs[j].Key } +func (fs FieldSorter) Swap(i, j int) { fs[i], fs[j] = fs[j], fs[i] } diff --git a/vendor/github.com/mattermost/logr/v2/fieldapi.go b/vendor/github.com/mattermost/logr/v2/fieldapi.go new file mode 100644 index 00000000..351e78de --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/fieldapi.go @@ -0,0 +1,127 @@ +package logr + +import ( + "fmt" + "time" +) + +// Any picks the best supported field type based on type of val. +// For best performance when passing a struct (or struct pointer), +// implement `logr.LogWriter` on the struct, otherwise reflection +// will be used to generate a string representation. +func Any(key string, val any) Field { + return fieldForAny(key, val) +} + +// Int64 constructs a field containing a key and Int64 value. +// +// Deprecated: Use [logr.Int] instead. +func Int64(key string, val int64) Field { + return Field{Key: key, Type: Int64Type, Integer: val} +} + +// Int32 constructs a field containing a key and Int32 value. +// +// Deprecated: Use [logr.Int] instead. +func Int32(key string, val int32) Field { + return Field{Key: key, Type: Int32Type, Integer: int64(val)} +} + +// Int constructs a field containing a key and int value. +func Int[T ~int | ~int8 | ~int16 | ~int32 | ~int64](key string, val T) Field { + return Field{Key: key, Type: IntType, Integer: int64(val)} +} + +// Uint64 constructs a field containing a key and Uint64 value. +// +// Deprecated: Use [logr.Uint] instead. +func Uint64(key string, val uint64) Field { + return Field{Key: key, Type: Uint64Type, Integer: int64(val)} +} + +// Uint32 constructs a field containing a key and Uint32 value. +// +// Deprecated: Use [logr.Uint] instead +func Uint32(key string, val uint32) Field { + return Field{Key: key, Type: Uint32Type, Integer: int64(val)} +} + +// Uint constructs a field containing a key and uint value. 
// Uint accepts any unsigned integer type (including uintptr); the value is
// stored in the Field's Integer slot via an int64 conversion, so values above
// math.MaxInt64 wrap — presumably acceptable for logging; TODO confirm.
func Uint[T ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr](key string, val T) Field {
	return Field{Key: key, Type: UintType, Integer: int64(val)}
}

// Float64 constructs a field containing a key and Float64 value.
//
// Deprecated: Use [logr.Float] instead
func Float64(key string, val float64) Field {
	return Field{Key: key, Type: Float64Type, Float: val}
}

// Float32 constructs a field containing a key and Float32 value.
//
// Deprecated: Use [logr.Float] instead
func Float32(key string, val float32) Field {
	return Field{Key: key, Type: Float32Type, Float: float64(val)}
}

// Float constructs a field containing a key and float value.
// NOTE(review): the field is tagged Float32Type even for float64 input; the
// value itself is stored as float64, so no precision is lost — formatters that
// treat Float32Type and Float64Type alike render it correctly. Confirm no
// formatter branches on the distinction before changing.
func Float[T ~float32 | ~float64](key string, val T) Field {
	return Field{Key: key, Type: Float32Type, Float: float64(val)}
}

// String constructs a field containing a key and String value.
// Accepts any string- or []byte-derived type; []byte input is copied into an
// immutable string.
func String[T ~string | ~[]byte](key string, val T) Field {
	return Field{Key: key, Type: StringType, String: string(val)}
}

// Stringer constructs a field containing a key and a `fmt.Stringer` value.
// The `String` method will be called in lazy fashion.
func Stringer(key string, val fmt.Stringer) Field {
	return Field{Key: key, Type: StringerType, Interface: val}
}

// Err constructs a field containing a default key ("error") and error value.
func Err(err error) Field {
	return NamedErr("error", err)
}

// NamedErr constructs a field containing a key and error value.
func NamedErr(key string, err error) Field {
	return Field{Key: key, Type: ErrorType, Interface: err}
}

// Bool constructs a field containing a key and bool value.
// The bool is packed into the Field's Integer slot (1 for true, 0 for false)
// so Field stays free of an extra bool member.
func Bool[T ~bool](key string, val T) Field {
	var b int64
	if val {
		b = 1
	}
	return Field{Key: key, Type: BoolType, Integer: b}
}

// Time constructs a field containing a key and time.Time value.
+func Time(key string, val time.Time) Field { + return Field{Key: key, Type: TimeType, Interface: val} +} + +// Duration constructs a field containing a key and time.Duration value. +func Duration(key string, val time.Duration) Field { + return Field{Key: key, Type: DurationType, Integer: int64(val)} +} + +// Millis constructs a field containing a key and timestamp value. +// The timestamp is expected to be milliseconds since Jan 1, 1970 UTC. +func Millis(key string, val int64) Field { + return Field{Key: key, Type: TimestampMillisType, Integer: val} +} + +// Array constructs a field containing a key and array value. +func Array[S ~[]E, E any](key string, val S) Field { + return Field{Key: key, Type: ArrayType, Interface: val} +} + +// Map constructs a field containing a key and map value. +func Map[M ~map[K]V, K comparable, V any](key string, val M) Field { + return Field{Key: key, Type: MapType, Interface: val} +} diff --git a/vendor/github.com/mattermost/logr/v2/filter.go b/vendor/github.com/mattermost/logr/v2/filter.go new file mode 100644 index 00000000..a52a7cf4 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/filter.go @@ -0,0 +1,10 @@ +package logr + +// Filter allows targets to determine which Level(s) are active +// for logging and which Level(s) require a stack trace to be output. +// A default implementation using "panic, fatal..." is provided, and +// a more flexible alternative implementation is also provided that +// allows any number of custom levels. +type Filter interface { + GetEnabledLevel(level Level) (Level, bool) +} diff --git a/vendor/github.com/mattermost/logr/v2/filtercustom.go b/vendor/github.com/mattermost/logr/v2/filtercustom.go new file mode 100644 index 00000000..c20f2811 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/filtercustom.go @@ -0,0 +1,47 @@ +package logr + +import ( + "sync" +) + +// CustomFilter allows targets to enable logging via a list of discrete levels. 
+type CustomFilter struct { + mux sync.RWMutex + levels map[LevelID]Level +} + +// NewCustomFilter creates a filter supporting discrete log levels. +func NewCustomFilter(levels ...Level) *CustomFilter { + filter := &CustomFilter{} + filter.Add(levels...) + return filter +} + +// GetEnabledLevel returns the Level with the specified Level.ID and whether the level +// is enabled for this filter. +func (cf *CustomFilter) GetEnabledLevel(level Level) (Level, bool) { + cf.mux.RLock() + defer cf.mux.RUnlock() + levelEnabled, ok := cf.levels[level.ID] + + if ok && levelEnabled.Name == "" { + levelEnabled.Name = level.Name + } + + return levelEnabled, ok +} + +// Add adds one or more levels to the list. Adding a level enables logging for +// that level on any targets using this CustomFilter. +func (cf *CustomFilter) Add(levels ...Level) { + cf.mux.Lock() + defer cf.mux.Unlock() + + if cf.levels == nil { + cf.levels = make(map[LevelID]Level) + } + + for _, s := range levels { + cf.levels[s.ID] = s + } +} diff --git a/vendor/github.com/mattermost/logr/v2/filterstd.go b/vendor/github.com/mattermost/logr/v2/filterstd.go new file mode 100644 index 00000000..fe917fe5 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/filterstd.go @@ -0,0 +1,71 @@ +package logr + +// StdFilter allows targets to filter via classic log levels where any level +// beyond a certain verbosity/severity is enabled. +type StdFilter struct { + Lvl Level + Stacktrace Level +} + +// GetEnabledLevel returns the Level with the specified Level.ID and whether the level +// is enabled for this filter. 
// GetEnabledLevel returns the Level with the specified Level.ID and whether the level
// is enabled for this filter.
// Level IDs are ordered with 0 as the most severe (Panic); a level is enabled
// when its ID is at or below the filter's verbosity threshold.
func (lt StdFilter) GetEnabledLevel(level Level) (Level, bool) {
	enabled := level.ID <= lt.Lvl.ID
	stackTrace := level.ID <= lt.Stacktrace.ID
	var levelEnabled Level

	if enabled {
		// Map the ID back onto the canonical package-level Level value so the
		// returned level carries the standard name/color, regardless of what
		// the caller passed in. Unknown IDs fall through unchanged.
		switch level.ID {
		case Panic.ID:
			levelEnabled = Panic
		case Fatal.ID:
			levelEnabled = Fatal
		case Error.ID:
			levelEnabled = Error
		case Warn.ID:
			levelEnabled = Warn
		case Info.ID:
			levelEnabled = Info
		case Debug.ID:
			levelEnabled = Debug
		case Trace.ID:
			levelEnabled = Trace
		default:
			levelEnabled = level
		}
	}

	// Stacktrace flag is set on the local copy only; the package-level vars
	// above are never mutated.
	if stackTrace {
		levelEnabled.Stacktrace = true
	}

	return levelEnabled, enabled
}

// IsEnabled returns true if the specified Level is at or above this verbosity. Also
// determines if a stack trace is required.
func (lt StdFilter) IsEnabled(level Level) bool {
	return level.ID <= lt.Lvl.ID
}

// IsStacktraceEnabled returns true if the specified Level requires a stack trace.
func (lt StdFilter) IsStacktraceEnabled(level Level) bool {
	return level.ID <= lt.Stacktrace.ID
}

var (
	// Panic is the highest level of severity.
	Panic = Level{ID: 0, Name: "panic", Color: Red}
	// Fatal designates a catastrophic error.
	Fatal = Level{ID: 1, Name: "fatal", Color: Red}
	// Error designates a serious but possibly recoverable error.
	Error = Level{ID: 2, Name: "error", Color: Red}
	// Warn designates non-critical error.
	Warn = Level{ID: 3, Name: "warn", Color: Yellow}
	// Info designates information regarding application events.
	Info = Level{ID: 4, Name: "info", Color: Cyan}
	// Debug designates verbose information typically used for debugging.
	Debug = Level{ID: 5, Name: "debug", Color: NoColor}
	// Trace designates the highest verbosity of log output.
+ Trace = Level{ID: 6, Name: "trace", Color: NoColor} +) diff --git a/vendor/github.com/mattermost/logr/v2/formatter.go b/vendor/github.com/mattermost/logr/v2/formatter.go new file mode 100644 index 00000000..1db8c97d --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/formatter.go @@ -0,0 +1,210 @@ +package logr + +import ( + "bytes" + "fmt" + "io" + "runtime" + "strconv" +) + +// Formatter turns a LogRec into a formatted string. +type Formatter interface { + // IsStacktraceNeeded returns true if this formatter requires a stacktrace to be + // generated for each LogRecord. Enabling features such as `Caller` field require + // a stacktrace. + IsStacktraceNeeded() bool + + // Format converts a log record to bytes. If buf is not nil then it will be + // be filled with the formatted results, otherwise a new buffer will be allocated. + Format(rec *LogRec, level Level, buf *bytes.Buffer) (*bytes.Buffer, error) +} + +const ( + // DefTimestampFormat is the default time stamp format used by Plain formatter and others. + DefTimestampFormat = "2006-01-02 15:04:05.000 Z07:00" + + // TimestampMillisFormat is the format for logging milliseconds UTC + TimestampMillisFormat = "Jan _2 15:04:05.000" +) + +// LimitByteSlice discards the bytes from a slice that exceeds the limit +func LimitByteSlice(b []byte, limit int) []byte { + if limit > 0 && limit < len(b) { + lb := make([]byte, limit, limit+3) + copy(lb, b[:limit]) + return append(lb, []byte("...")...) 
+ } + + return b +} + +// LimitString discards the runes from a slice that exceeds the limit +func LimitString(b string, limit int) string { + return string(LimitByteSlice([]byte(b), limit)) +} + +type LimitedStringer struct { + fmt.Stringer + Limit int +} + +func (ls *LimitedStringer) String() string { + return LimitString(ls.Stringer.String(), ls.Limit) +} + +type Writer struct { + io.Writer +} + +func (w Writer) Writes(elems ...[]byte) (int, error) { + var count int + for _, e := range elems { + if c, err := w.Write(e); err != nil { + return count + c, err + } else { + count += c + } + } + return count, nil +} + +// DefaultFormatter is the default formatter, outputting only text with +// no colors and a space delimiter. Use `format.Plain` instead. +type DefaultFormatter struct { +} + +// IsStacktraceNeeded always returns false for default formatter since the +// `Caller` field is not supported. +func (p *DefaultFormatter) IsStacktraceNeeded() bool { + return false +} + +// Format converts a log record to bytes. +func (p *DefaultFormatter) Format(rec *LogRec, level Level, buf *bytes.Buffer) (*bytes.Buffer, error) { + if buf == nil { + buf = &bytes.Buffer{} + } + timestampFmt := DefTimestampFormat + + buf.WriteString(rec.Time().Format(timestampFmt)) + buf.Write(Space) + + buf.WriteString(level.Name) + buf.Write(Space) + + buf.WriteString(rec.Msg()) + buf.Write(Space) + + fields := rec.Fields() + if len(fields) > 0 { + if err := WriteFields(buf, fields, Space, NoColor); err != nil { + return nil, err + } + } + + if level.Stacktrace { + frames := rec.StackFrames() + if len(frames) > 0 { + buf.Write(Newline) + if err := WriteStacktrace(buf, rec.StackFrames()); err != nil { + return nil, err + } + } + } + buf.Write(Newline) + + return buf, nil +} + +// WriteFields writes zero or more name value pairs to the io.Writer. +// The pairs output in key=value format with optional separator between fields. 
+func WriteFields(w io.Writer, fields []Field, separator []byte, color Color) error { + ws := Writer{w} + + sep := []byte{} + for _, field := range fields { + if err := writeField(ws, field, sep, color); err != nil { + return err + } + sep = separator + } + return nil +} + +func writeField(ws Writer, field Field, sep []byte, color Color) error { + if len(sep) != 0 { + if _, err := ws.Write(sep); err != nil { + return err + } + } + if err := WriteWithColor(ws, field.Key, color); err != nil { + return err + } + if _, err := ws.Write(Equals); err != nil { + return err + } + return field.ValueString(ws, shouldQuote) +} + +// shouldQuote returns true if val contains any characters that might be unsafe +// when injecting log output into an aggregator, viewer or report. +func shouldQuote(val string) bool { + for _, c := range val { + if !((c >= '0' && c <= '9') || + (c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + c == '-' || c == '.' || c == '_' || c == '/' || c == '@' || c == '^' || c == '+') { + return true + } + } + return false +} + +// WriteStacktrace formats and outputs a stack trace to an io.Writer. +func WriteStacktrace(w io.Writer, frames []runtime.Frame) error { + ws := Writer{w} + for _, frame := range frames { + if frame.Function != "" { + if _, err := ws.Writes(Space, Space, []byte(frame.Function), Newline); err != nil { + return err + } + } + if frame.File != "" { + s := strconv.FormatInt(int64(frame.Line), 10) + if _, err := ws.Writes([]byte{' ', ' ', ' ', ' ', ' ', ' '}, []byte(frame.File), Colon, []byte(s), Newline); err != nil { + return err + } + } + } + return nil +} + +// WriteWithColor outputs a string with the specified ANSI color. 
+func WriteWithColor(w io.Writer, s string, color Color) error { + var err error + + writer := func(buf []byte) { + if err != nil { + return + } + _, err = w.Write(buf) + } + + if color != NoColor { + writer(AnsiColorPrefix) + writer([]byte(strconv.FormatInt(int64(color), 10))) + writer(AnsiColorSuffix) + } + + if err == nil { + _, err = io.WriteString(w, s) + } + + if color != NoColor { + writer(AnsiColorPrefix) + writer([]byte(strconv.FormatInt(int64(NoColor), 10))) + writer(AnsiColorSuffix) + } + return err +} diff --git a/vendor/github.com/mattermost/logr/v2/formatters/gelf.go b/vendor/github.com/mattermost/logr/v2/formatters/gelf.go new file mode 100644 index 00000000..01cf3ec7 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/formatters/gelf.go @@ -0,0 +1,161 @@ +package formatters + +import ( + "bytes" + "fmt" + "net" + "os" + "strings" + + "github.com/francoispqt/gojay" + "github.com/mattermost/logr/v2" +) + +const ( + GelfVersion = "1.1" + GelfVersionKey = "version" + GelfHostKey = "host" + GelfShortKey = "short_message" + GelfFullKey = "full_message" + GelfTimestampKey = "timestamp" + GelfLevelKey = "level" +) + +// Gelf formats log records as GELF rcords (https://docs.graylog.org/en/4.0/pages/gelf.html). +type Gelf struct { + // Hostname allows a custom hostname, otherwise os.Hostname is used + Hostname string `json:"hostname"` + + // EnableCaller enables output of the file and line number that emitted a log record. + EnableCaller bool `json:"enable_caller"` + + // FieldSorter allows custom sorting for the context fields. + FieldSorter func(fields []logr.Field) []logr.Field `json:"-"` +} + +func (g *Gelf) CheckValid() error { + return nil +} + +// IsStacktraceNeeded returns true if a stacktrace is needed so we can output the `Caller` field. +func (g *Gelf) IsStacktraceNeeded() bool { + return g.EnableCaller +} + +// Format converts a log record to bytes in GELF format. 
+func (g *Gelf) Format(rec *logr.LogRec, level logr.Level, buf *bytes.Buffer) (*bytes.Buffer, error) { + if buf == nil { + buf = &bytes.Buffer{} + } + enc := gojay.BorrowEncoder(buf) + defer func() { + enc.Release() + }() + + gr := gelfRecord{ + LogRec: rec, + Gelf: g, + level: level, + sorter: g.FieldSorter, + } + + err := enc.EncodeObject(gr) + if err != nil { + return nil, err + } + + buf.WriteByte(0) + return buf, nil +} + +type gelfRecord struct { + *logr.LogRec + *Gelf + level logr.Level + sorter func(fields []logr.Field) []logr.Field +} + +// MarshalJSONObject encodes the LogRec as JSON. +func (gr gelfRecord) MarshalJSONObject(enc *gojay.Encoder) { + enc.AddStringKey(GelfVersionKey, GelfVersion) + enc.AddStringKey(GelfHostKey, gr.getHostname()) + enc.AddStringKey(GelfShortKey, gr.safeMsg("-")) // Gelf requires a non-empty `short_message` + + if gr.level.Stacktrace { + frames := gr.StackFrames() + if len(frames) != 0 { + var sbuf strings.Builder + for _, frame := range frames { + fmt.Fprintf(&sbuf, "%s\n %s:%d\n", frame.Function, frame.File, frame.Line) + } + enc.AddStringKey(GelfFullKey, sbuf.String()) + } + } + + secs := float64(gr.Time().UTC().Unix()) + millis := float64(gr.Time().Nanosecond() / 1000000) + ts := secs + (millis / 1000) + enc.AddFloat64Key(GelfTimestampKey, ts) + + enc.AddUint32Key(GelfLevelKey, uint32(gr.level.ID)) + + var fields []logr.Field + if gr.EnableCaller { + caller := logr.Field{ + Key: "_caller", + Type: logr.StringType, + String: gr.LogRec.Caller(), + } + fields = append(fields, caller) + } + + fields = append(fields, gr.Fields()...) + if gr.sorter != nil { + fields = gr.sorter(fields) + } + + if len(fields) > 0 { + for _, field := range fields { + if !strings.HasPrefix("_", field.Key) { + field.Key = "_" + field.Key + } + if err := encodeField(enc, field); err != nil { + enc.AddStringKey(field.Key, fmt.Sprintf("", err)) + } + } + } +} + +// IsNil returns true if the gelf record pointer is nil. 
+func (gr gelfRecord) IsNil() bool { + return gr.LogRec == nil +} + +// safeMsg returns the log record Message field or an alternate string when msg is empty. +func (gr gelfRecord) safeMsg(alt string) string { + s := gr.Msg() + if s == "" { + s = alt + } + return s +} + +func (g *Gelf) getHostname() string { + if g.Hostname != "" { + return g.Hostname + } + h, err := os.Hostname() + if err == nil { + return h + } + + // get the egress IP by fake dialing any address. UDP ensures no dial. + conn, err := net.Dial("udp", "8.8.8.8:80") + if err != nil { + return "unknown" + } + defer conn.Close() + + local := conn.LocalAddr().(*net.UDPAddr) + return local.IP.String() +} diff --git a/vendor/github.com/mattermost/logr/v2/formatters/json.go b/vendor/github.com/mattermost/logr/v2/formatters/json.go new file mode 100644 index 00000000..172b9612 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/formatters/json.go @@ -0,0 +1,273 @@ +package formatters + +import ( + "bytes" + "encoding/json" + "fmt" + "runtime" + "strings" + "sync" + + "github.com/francoispqt/gojay" + "github.com/mattermost/logr/v2" +) + +// JSON formats log records as JSON. +type JSON struct { + // DisableTimestamp disables output of timestamp field. + DisableTimestamp bool `json:"disable_timestamp"` + // DisableLevel disables output of level field. + DisableLevel bool `json:"disable_level"` + // DisableMsg disables output of msg field. + DisableMsg bool `json:"disable_msg"` + // DisableFields disables output of all fields. + DisableFields bool `json:"disable_fields"` + // DisableStacktrace disables output of stack trace. + DisableStacktrace bool `json:"disable_stacktrace"` + // EnableCaller enables output of the file and line number that emitted a log record. + EnableCaller bool `json:"enable_caller"` + + // TimestampFormat is an optional format for timestamps. If empty + // then DefTimestampFormat is used. 
+ TimestampFormat string `json:"timestamp_format"` + + // KeyTimestamp overrides the timestamp field key name. + KeyTimestamp string `json:"key_timestamp"` + + // KeyLevel overrides the level field key name. + KeyLevel string `json:"key_level"` + + // KeyMsg overrides the msg field key name. + KeyMsg string `json:"key_msg"` + + // KeyGroupFields when not empty will group all context fields + // under this key. + KeyGroupFields string `json:"key_group_fields"` + + // KeyStacktrace overrides the stacktrace field key name. + KeyStacktrace string `json:"key_stacktrace"` + + // KeyCaller overrides the caller field key name. + KeyCaller string `json:"key_caller"` + + // FieldSorter allows custom sorting of the fields. If nil then + // no sorting is done. + FieldSorter func(fields []logr.Field) []logr.Field `json:"-"` + + once sync.Once +} + +func (j *JSON) CheckValid() error { + return nil +} + +// IsStacktraceNeeded returns true if a stacktrace is needed so we can output the `Caller` field. +func (j *JSON) IsStacktraceNeeded() bool { + return j.EnableCaller +} + +// Format converts a log record to bytes in JSON format. +func (j *JSON) Format(rec *logr.LogRec, level logr.Level, buf *bytes.Buffer) (*bytes.Buffer, error) { + j.once.Do(j.applyDefaultKeyNames) + + if buf == nil { + buf = &bytes.Buffer{} + } + enc := gojay.BorrowEncoder(buf) + defer func() { + enc.Release() + }() + + jlr := JSONLogRec{ + LogRec: rec, + JSON: j, + level: level, + sorter: j.FieldSorter, + } + + err := enc.EncodeObject(jlr) + if err != nil { + return nil, err + } + buf.WriteByte('\n') + return buf, nil +} + +func (j *JSON) applyDefaultKeyNames() { + if j.KeyTimestamp == "" { + j.KeyTimestamp = "timestamp" + } + if j.KeyLevel == "" { + j.KeyLevel = "level" + } + if j.KeyMsg == "" { + j.KeyMsg = "msg" + } + if j.KeyStacktrace == "" { + j.KeyStacktrace = "stacktrace" + } + if j.KeyCaller == "" { + j.KeyCaller = "caller" + } +} + +// JSONLogRec decorates a LogRec adding JSON encoding. 
+type JSONLogRec struct { + *logr.LogRec + *JSON + level logr.Level + sorter func(fields []logr.Field) []logr.Field +} + +// MarshalJSONObject encodes the LogRec as JSON. +func (jlr JSONLogRec) MarshalJSONObject(enc *gojay.Encoder) { + if !jlr.DisableTimestamp { + timestampFmt := jlr.TimestampFormat + if timestampFmt == "" { + timestampFmt = logr.DefTimestampFormat + } + time := jlr.Time() + enc.AddTimeKey(jlr.KeyTimestamp, &time, timestampFmt) + } + if !jlr.DisableLevel { + enc.AddStringKey(jlr.KeyLevel, jlr.level.Name) + } + if !jlr.DisableMsg { + enc.AddStringKey(jlr.KeyMsg, jlr.Msg()) + } + if jlr.EnableCaller { + enc.AddStringKey(jlr.KeyCaller, jlr.Caller()) + } + if !jlr.DisableFields { + fields := jlr.Fields() + if jlr.sorter != nil { + fields = jlr.sorter(fields) + } + if jlr.KeyGroupFields != "" { + enc.AddObjectKey(jlr.KeyGroupFields, FieldArray(fields)) + } else { + if len(fields) > 0 { + for _, field := range fields { + field = jlr.prefixCollision(field) + if err := encodeField(enc, field); err != nil { + enc.AddStringKey(field.Key, "") + } + } + } + } + } + if jlr.level.Stacktrace && !jlr.DisableStacktrace { + frames := jlr.StackFrames() + if len(frames) > 0 { + enc.AddArrayKey(jlr.KeyStacktrace, stackFrames(frames)) + } + } +} + +// IsNil returns true if the LogRec pointer is nil. +func (rec JSONLogRec) IsNil() bool { + return rec.LogRec == nil +} + +func (rec JSONLogRec) prefixCollision(field logr.Field) logr.Field { + switch field.Key { + case rec.KeyTimestamp, rec.KeyLevel, rec.KeyMsg, rec.KeyStacktrace: + f := field + f.Key = "_" + field.Key + return rec.prefixCollision(f) + } + return field +} + +type stackFrames []runtime.Frame + +// MarshalJSONArray encodes stackFrames slice as JSON. +func (s stackFrames) MarshalJSONArray(enc *gojay.Encoder) { + for _, frame := range s { + enc.AddObject(stackFrame(frame)) + } +} + +// IsNil returns true if stackFrames is empty slice. 
+func (s stackFrames) IsNil() bool { + return len(s) == 0 +} + +type stackFrame runtime.Frame + +// MarshalJSONArray encodes stackFrame as JSON. +func (f stackFrame) MarshalJSONObject(enc *gojay.Encoder) { + enc.AddStringKey("Function", f.Function) + enc.AddStringKey("File", f.File) + enc.AddIntKey("Line", f.Line) +} + +func (f stackFrame) IsNil() bool { + return false +} + +type FieldArray []logr.Field + +// MarshalJSONObject encodes Fields map to JSON. +func (fa FieldArray) MarshalJSONObject(enc *gojay.Encoder) { + for _, fld := range fa { + if err := encodeField(enc, fld); err != nil { + enc.AddStringKey(fld.Key, "") + } + } +} + +// IsNil returns true if map is nil. +func (fa FieldArray) IsNil() bool { + return fa == nil +} + +func encodeField(enc *gojay.Encoder, field logr.Field) error { + // first check if the value has a marshaller already. + switch vt := field.Interface.(type) { + case gojay.MarshalerJSONObject: + enc.AddObjectKey(field.Key, vt) + return nil + case gojay.MarshalerJSONArray: + enc.AddArrayKey(field.Key, vt) + return nil + } + + switch field.Type { + case logr.StringType: + enc.AddStringKey(field.Key, field.String) + + case logr.BoolType: + var b bool + if field.Integer != 0 { + b = true + } + enc.AddBoolKey(field.Key, b) + + case logr.StructType, logr.ArrayType, logr.MapType, logr.UnknownType: + b, err := json.Marshal(field.Interface) + if err != nil { + return err + } + embed := gojay.EmbeddedJSON(b) + enc.AddEmbeddedJSONKey(field.Key, &embed) + + case logr.StringerType, logr.ErrorType, logr.TimestampMillisType, logr.TimeType, logr.DurationType, logr.BinaryType: + var buf strings.Builder + _ = field.ValueString(&buf, nil) + enc.AddStringKey(field.Key, buf.String()) + + case logr.Int64Type, logr.Int32Type, logr.IntType: + enc.AddInt64Key(field.Key, field.Integer) + + case logr.Uint64Type, logr.Uint32Type, logr.UintType: + enc.AddUint64Key(field.Key, uint64(field.Integer)) + + case logr.Float64Type, logr.Float32Type: + 
enc.AddFloat64Key(field.Key, field.Float) + + default: + return fmt.Errorf("invalid field type: %d", field.Type) + } + return nil +} diff --git a/vendor/github.com/mattermost/logr/v2/formatters/plain.go b/vendor/github.com/mattermost/logr/v2/formatters/plain.go new file mode 100644 index 00000000..4d8af643 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/formatters/plain.go @@ -0,0 +1,146 @@ +package formatters + +import ( + "bytes" + "fmt" + "strings" + + "github.com/mattermost/logr/v2" +) + +// Plain is the simplest formatter, outputting only text with +// no colors. +type Plain struct { + // DisableTimestamp disables output of timestamp field. + DisableTimestamp bool `json:"disable_timestamp"` + // DisableLevel disables output of level field. + DisableLevel bool `json:"disable_level"` + // DisableMsg disables output of msg field. + DisableMsg bool `json:"disable_msg"` + // DisableFields disables output of all fields. + DisableFields bool `json:"disable_fields"` + // DisableStacktrace disables output of stack trace. + DisableStacktrace bool `json:"disable_stacktrace"` + // EnableCaller enables output of the file and line number that emitted a log record. + EnableCaller bool `json:"enable_caller"` + + // Delim is an optional delimiter output between each log field. + // Defaults to a single space. + Delim string `json:"delim"` + + // MinLevelLen sets the minimum level name length. If the level name is less + // than the minimum it will be padded with spaces. + MinLevelLen int `json:"min_level_len"` + + // MinMessageLen sets the minimum msg length. If the msg text is less + // than the minimum it will be padded with spaces. + MinMessageLen int `json:"min_msg_len"` + + // TimestampFormat is an optional format for timestamps. If empty + // then DefTimestampFormat is used. + TimestampFormat string `json:"timestamp_format"` + + // LineEnd sets the end of line character(s). Defaults to '\n'. 
+ LineEnd string `json:"line_end"` + + // EnableColor sets whether output should include color. + EnableColor bool `json:"enable_color"` +} + +func (p *Plain) CheckValid() error { + if p.MinMessageLen < 0 || p.MinMessageLen > 1024 { + return fmt.Errorf("min_msg_len is invalid(%d)", p.MinMessageLen) + } + return nil +} + +// IsStacktraceNeeded returns true if a stacktrace is needed so we can output the `Caller` field. +func (p *Plain) IsStacktraceNeeded() bool { + return p.EnableCaller +} + +// Format converts a log record to bytes. +func (p *Plain) Format(rec *logr.LogRec, level logr.Level, buf *bytes.Buffer) (*bytes.Buffer, error) { + delim := p.Delim + if delim == "" { + delim = " " + } + if buf == nil { + buf = &bytes.Buffer{} + } + + timestampFmt := p.TimestampFormat + if timestampFmt == "" { + timestampFmt = logr.DefTimestampFormat + } + + color := logr.NoColor + if p.EnableColor { + color = level.Color + } + + if !p.DisableLevel { + _ = logr.WriteWithColor(buf, level.Name, color) + count := len(level.Name) + if p.MinLevelLen > count { + _, _ = buf.WriteString(strings.Repeat(" ", p.MinLevelLen-count)) + } + buf.WriteString(delim) + } + + if !p.DisableTimestamp { + var arr [128]byte + tbuf := rec.Time().AppendFormat(arr[:0], timestampFmt) + buf.WriteByte('[') + buf.Write(tbuf) + buf.WriteByte(']') + buf.WriteString(delim) + } + + if !p.DisableMsg { + count, _ := buf.WriteString(rec.Msg()) + if p.MinMessageLen > count { + _, _ = buf.WriteString(strings.Repeat(" ", p.MinMessageLen-count)) + } + _, _ = buf.WriteString(delim) + } + + var fields []logr.Field + + if p.EnableCaller { + fld := logr.Field{ + Key: "caller", + Type: logr.StringType, + String: rec.Caller(), + } + fields = append(fields, fld) + } + + if !p.DisableFields { + fields = append(fields, rec.Fields()...) 
+ } + + if len(fields) > 0 { + if err := logr.WriteFields(buf, fields, logr.Space, color); err != nil { + return nil, err + } + } + + if level.Stacktrace && !p.DisableStacktrace { + frames := rec.StackFrames() + if len(frames) > 0 { + buf.WriteString("\n") + if err := logr.WriteStacktrace(buf, rec.StackFrames()); err != nil { + return nil, err + } + } + } + + if p.LineEnd == "" { + buf.WriteString("\n") + } else { + buf.WriteString(p.LineEnd) + } + + return buf, nil +} diff --git a/vendor/github.com/mattermost/logr/v2/level.go b/vendor/github.com/mattermost/logr/v2/level.go new file mode 100644 index 00000000..643d68e3 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/level.go @@ -0,0 +1,34 @@ +package logr + +var AnsiColorPrefix = []byte("\u001b[") +var AnsiColorSuffix = []byte("m") + +// Color for formatters that support color output. +type Color uint8 + +const ( + NoColor Color = 0 + Red Color = 31 + Green Color = 32 + Yellow Color = 33 + Blue Color = 34 + Magenta Color = 35 + Cyan Color = 36 + White Color = 37 +) + +// LevelID is the unique id of each level. +type LevelID uint + +// Level provides a mechanism to enable/disable specific log lines. +type Level struct { + ID LevelID `json:"id"` + Name string `json:"name"` + Stacktrace bool `json:"stacktrace,omitempty"` + Color Color `json:"color,omitempty"` +} + +// String returns the name of this level. +func (level Level) String() string { + return level.Name +} diff --git a/vendor/github.com/mattermost/logr/v2/levelcache.go b/vendor/github.com/mattermost/logr/v2/levelcache.go new file mode 100644 index 00000000..2cefb61d --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/levelcache.go @@ -0,0 +1,98 @@ +package logr + +import ( + "fmt" + "sync" +) + +// LevelStatus represents whether a level is enabled and +// requires a stack trace. 
+type LevelStatus struct { + Enabled bool + Stacktrace bool + empty bool +} + +type levelCache interface { + setup() + get(id LevelID) (LevelStatus, bool) + put(id LevelID, status LevelStatus) error + clear() +} + +// syncMapLevelCache uses sync.Map which may better handle large concurrency +// scenarios. +type syncMapLevelCache struct { + m sync.Map +} + +func (c *syncMapLevelCache) setup() { + c.clear() +} + +func (c *syncMapLevelCache) get(id LevelID) (LevelStatus, bool) { + if id > MaxLevelID { + return LevelStatus{}, false + } + s, _ := c.m.Load(id) + status := s.(LevelStatus) + return status, !status.empty +} + +func (c *syncMapLevelCache) put(id LevelID, status LevelStatus) error { + if id > MaxLevelID { + return fmt.Errorf("level id cannot exceed MaxLevelID (%d)", MaxLevelID) + } + c.m.Store(id, status) + return nil +} + +func (c *syncMapLevelCache) clear() { + var i LevelID + for i = 0; i < MaxLevelID; i++ { + c.m.Store(i, LevelStatus{empty: true}) + } +} + +// arrayLevelCache using array and a mutex. 
+type arrayLevelCache struct { + arr [MaxLevelID + 1]LevelStatus + mux sync.RWMutex +} + +func (c *arrayLevelCache) setup() { + c.clear() +} + +//var dummy = LevelStatus{} + +func (c *arrayLevelCache) get(id LevelID) (LevelStatus, bool) { + if id > MaxLevelID { + return LevelStatus{}, false + } + c.mux.RLock() + status := c.arr[id] + ok := !status.empty + c.mux.RUnlock() + return status, ok +} + +func (c *arrayLevelCache) put(id LevelID, status LevelStatus) error { + if id > MaxLevelID { + return fmt.Errorf("level id cannot exceed MaxLevelID (%d)", MaxLevelID) + } + c.mux.Lock() + defer c.mux.Unlock() + + c.arr[id] = status + return nil +} + +func (c *arrayLevelCache) clear() { + c.mux.Lock() + defer c.mux.Unlock() + + for i := range c.arr { + c.arr[i] = LevelStatus{empty: true} + } +} diff --git a/vendor/github.com/mattermost/logr/v2/logger.go b/vendor/github.com/mattermost/logr/v2/logger.go new file mode 100644 index 00000000..6ce9c9f0 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/logger.go @@ -0,0 +1,99 @@ +package logr + +import "log" + +// Logger provides context for logging via fields. +type Logger struct { + lgr *Logr + fields []Field +} + +// Logr returns the `Logr` instance that created this `Logger`. +func (logger Logger) Logr() *Logr { + return logger.lgr +} + +// With creates a new `Logger` with any existing fields plus the new ones. +func (logger Logger) With(fields ...Field) Logger { + l := Logger{lgr: logger.lgr} + size := len(logger.fields) + len(fields) + if size > 0 { + l.fields = make([]Field, 0, size) + l.fields = append(l.fields, logger.fields...) + l.fields = append(l.fields, fields...) + } + return l +} + +// StdLogger creates a standard logger backed by this `Logr.Logger` instance. +// All log records are emitted with the specified log level. 
+func (logger Logger) StdLogger(level Level) *log.Logger { + return NewStdLogger(level, logger) +} + +// IsLevelEnabled determines if the specified level is enabled for at least +// one log target. +func (logger Logger) IsLevelEnabled(level Level) bool { + status := logger.Logr().IsLevelEnabled(level) + return status.Enabled +} + +// Sugar creates a new `Logger` with a less structured API. Any fields are preserved. +func (logger Logger) Sugar(fields ...Field) Sugar { + return Sugar{ + logger: logger.With(fields...), + } +} + +// Log checks that the level matches one or more targets, and +// if so, generates a log record that is added to the Logr queue. +// Arguments are handled in the manner of fmt.Print. +func (logger Logger) Log(lvl Level, msg string, fields ...Field) { + status := logger.lgr.IsLevelEnabled(lvl) + if status.Enabled { + rec := NewLogRec(lvl, logger, msg, fields, status.Stacktrace) + logger.lgr.enqueue(rec) + } +} + +// LogM calls `Log` multiple times, one for each level provided. +func (logger Logger) LogM(levels []Level, msg string, fields ...Field) { + for _, lvl := range levels { + logger.Log(lvl, msg, fields...) + } +} + +// Trace is a convenience method equivalent to `Log(TraceLevel, msg, fields...)`. +func (logger Logger) Trace(msg string, fields ...Field) { + logger.Log(Trace, msg, fields...) +} + +// Debug is a convenience method equivalent to `Log(DebugLevel, msg, fields...)`. +func (logger Logger) Debug(msg string, fields ...Field) { + logger.Log(Debug, msg, fields...) +} + +// Info is a convenience method equivalent to `Log(InfoLevel, msg, fields...)`. +func (logger Logger) Info(msg string, fields ...Field) { + logger.Log(Info, msg, fields...) +} + +// Warn is a convenience method equivalent to `Log(WarnLevel, msg, fields...)`. +func (logger Logger) Warn(msg string, fields ...Field) { + logger.Log(Warn, msg, fields...) +} + +// Error is a convenience method equivalent to `Log(ErrorLevel, msg, fields...)`. 
+func (logger Logger) Error(msg string, fields ...Field) { + logger.Log(Error, msg, fields...) +} + +// Fatal is a convenience method equivalent to `Log(FatalLevel, msg, fields...)` +func (logger Logger) Fatal(msg string, fields ...Field) { + logger.Log(Fatal, msg, fields...) +} + +// Panic is a convenience method equivalent to `Log(PanicLevel, msg, fields...)` +func (logger Logger) Panic(msg string, fields ...Field) { + logger.Log(Panic, msg, fields...) +} diff --git a/vendor/github.com/mattermost/logr/v2/logr.go b/vendor/github.com/mattermost/logr/v2/logr.go new file mode 100644 index 00000000..0025c35e --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/logr.go @@ -0,0 +1,495 @@ +package logr + +import ( + "bytes" + "context" + "errors" + "fmt" + "os" + "sync" + "sync/atomic" + "time" + + "github.com/wiggin77/merror" +) + +// Logr maintains a list of log targets and accepts incoming +// log records. Use `New` to create instances. +type Logr struct { + tmux sync.RWMutex // targetHosts mutex + targetHosts []*TargetHost + + in chan *LogRec + quit chan struct{} // closed by Shutdown to exit read loop + done chan struct{} // closed when read loop exited + lvlCache levelCache + bufferPool sync.Pool + options *options + + metricsMux sync.RWMutex + metrics *metrics + + shutdown int32 +} + +// New creates a new Logr instance with one or more options specified. +// Some options with invalid values can cause an error to be returned, +// however `logr.New()` using just defaults never errors. 
+func New(opts ...Option) (*Logr, error) { + options := &options{ + maxQueueSize: DefaultMaxQueueSize, + enqueueTimeout: DefaultEnqueueTimeout, + shutdownTimeout: DefaultShutdownTimeout, + flushTimeout: DefaultFlushTimeout, + maxPooledBuffer: DefaultMaxPooledBuffer, + maxFieldLen: DefaultMaxFieldLength, + } + + lgr := &Logr{options: options} + + // apply the options + for _, opt := range opts { + if err := opt(lgr); err != nil { + return nil, err + } + } + pkgName := GetLogrPackageName() + if pkgName != "" { + opt := StackFilter(pkgName, pkgName+"/targets", pkgName+"/formatters") + _ = opt(lgr) + } + + lgr.in = make(chan *LogRec, lgr.options.maxQueueSize) + lgr.quit = make(chan struct{}) + lgr.done = make(chan struct{}) + + if lgr.options.useSyncMapLevelCache { + lgr.lvlCache = &syncMapLevelCache{} + } else { + lgr.lvlCache = &arrayLevelCache{} + } + lgr.lvlCache.setup() + + lgr.bufferPool = sync.Pool{ + New: func() interface{} { + return new(bytes.Buffer) + }, + } + + lgr.initMetrics(lgr.options.metricsCollector, lgr.options.metricsUpdateFreqMillis) + + go lgr.start() + + return lgr, nil +} + +// AddTarget adds a target to the logger which will receive +// log records for outputting. +func (lgr *Logr) AddTarget(target Target, name string, filter Filter, formatter Formatter, maxQueueSize int) error { + if lgr.IsShutdown() { + return fmt.Errorf("AddTarget called after Logr shut down") + } + + lgr.metricsMux.RLock() + metrics := lgr.metrics + lgr.metricsMux.RUnlock() + + hostOpts := targetHostOptions{ + name: name, + filter: filter, + formatter: formatter, + maxQueueSize: maxQueueSize, + metrics: metrics, + } + + host, err := newTargetHost(target, hostOpts) + if err != nil { + return err + } + + lgr.tmux.Lock() + defer lgr.tmux.Unlock() + + lgr.targetHosts = append(lgr.targetHosts, host) + + lgr.ResetLevelCache() + + return nil +} + +// NewLogger creates a Logger using defaults. 
A `Logger` is light-weight +// enough to create on-demand, but typically one or more Loggers are +// created and re-used. +func (lgr *Logr) NewLogger() Logger { + logger := Logger{lgr: lgr} + return logger +} + +var levelStatusDisabled = LevelStatus{} + +// IsLevelEnabled returns true if at least one target has the specified +// level enabled. The result is cached so that subsequent checks are fast. +func (lgr *Logr) IsLevelEnabled(lvl Level) LevelStatus { + // No levels enabled after shutdown + if atomic.LoadInt32(&lgr.shutdown) != 0 { + return levelStatusDisabled + } + + // Check cache. + status, ok := lgr.lvlCache.get(lvl.ID) + if ok { + return status + } + + status = LevelStatus{} + + // Cache miss; check each target. + lgr.tmux.RLock() + defer lgr.tmux.RUnlock() + for _, host := range lgr.targetHosts { + enabled, level := host.IsLevelEnabled(lvl) + if enabled { + status.Enabled = true + if level.Stacktrace || host.formatter.IsStacktraceNeeded() { + status.Stacktrace = true + break // if both level and stacktrace enabled then no sense checking more targets + } + } + } + + // Cache and return the result. + if err := lgr.lvlCache.put(lvl.ID, status); err != nil { + lgr.ReportError(err) + return LevelStatus{} + } + return status +} + +// HasTargets returns true only if at least one target exists within the lgr. +func (lgr *Logr) HasTargets() bool { + lgr.tmux.RLock() + defer lgr.tmux.RUnlock() + return len(lgr.targetHosts) > 0 +} + +// TargetInfo provides name and type for a Target. +type TargetInfo struct { + Name string + Type string +} + +// TargetInfos enumerates all the targets added to this lgr. +// The resulting slice represents a snapshot at time of calling. 
+func (lgr *Logr) TargetInfos() []TargetInfo { + infos := make([]TargetInfo, 0) + + lgr.tmux.RLock() + defer lgr.tmux.RUnlock() + + for _, host := range lgr.targetHosts { + inf := TargetInfo{ + Name: host.String(), + Type: fmt.Sprintf("%T", host.target), + } + infos = append(infos, inf) + } + return infos +} + +// RemoveTargets safely removes one or more targets based on the filtering method. +// f should return true to delete the target, false to keep it. +// When removing a target, best effort is made to write any queued log records before +// closing, with cxt determining how much time can be spent in total. +// Note, keep the timeout short since this method blocks certain logging operations. +func (lgr *Logr) RemoveTargets(cxt context.Context, f func(ti TargetInfo) bool) error { + errs := merror.New() + hosts := make([]*TargetHost, 0) + + lgr.tmux.Lock() + defer lgr.tmux.Unlock() + + for _, host := range lgr.targetHosts { + inf := TargetInfo{ + Name: host.String(), + Type: fmt.Sprintf("%T", host.target), + } + if f(inf) { + if err := host.Shutdown(cxt); err != nil { + errs.Append(err) + } + } else { + hosts = append(hosts, host) + } + } + + lgr.targetHosts = hosts + lgr.ResetLevelCache() + + return errs.ErrorOrNil() +} + +// ResetLevelCache resets the cached results of `IsLevelEnabled`. This is +// called any time a Target is added or a target's level is changed. +func (lgr *Logr) ResetLevelCache() { + lgr.lvlCache.clear() +} + +// SetMetricsCollector sets (or resets) the metrics collector to be used for gathering +// metrics for all targets. Only targets added after this call will use the collector. +// +// To ensure all targets use a collector, use the `SetMetricsCollector` option when +// creating the Logr instead, or configure/reconfigure the Logr after calling this method. 
+func (lgr *Logr) SetMetricsCollector(collector MetricsCollector, updateFreqMillis int64) { + lgr.initMetrics(collector, updateFreqMillis) +} + +// enqueue adds a log record to the logr queue. If the queue is full then +// this function either blocks or the log record is dropped, depending on +// the result of calling `OnQueueFull`. +func (lgr *Logr) enqueue(rec *LogRec) { + // check if a limit has been configured + if limit := lgr.options.maxFieldLen; limit > 0 { + // we limit the message + rec.msg = LimitString(rec.msg, limit) + + // then we range over fields to apply the limit + for i := range rec.fields { + switch rec.fields[i].Type { + case StringType: + rec.fields[i].String = LimitString(rec.fields[i].String, limit) + case StringerType: + if v, ok := rec.fields[i].Interface.(fmt.Stringer); ok { + rec.fields[i].Interface = &LimitedStringer{ + Stringer: v, + Limit: limit, + } + } + default: + // no limits for other field types + } + } + } + + select { + case lgr.in <- rec: + default: + if lgr.options.onQueueFull != nil && lgr.options.onQueueFull(rec, cap(lgr.in)) { + return // drop the record + } + select { + case <-time.After(lgr.options.enqueueTimeout): + lgr.ReportError(fmt.Errorf("enqueue timed out for log rec [%v]", rec)) + case lgr.in <- rec: // block until success or timeout + } + } +} + +// Flush blocks while flushing the logr queue and all target queues, by +// writing existing log records to valid targets. +// Any attempts to add new log records will block until flush is complete. +// `logr.FlushTimeout` determines how long flush can execute before +// timing out. Use `IsTimeoutError` to determine if the returned error is +// due to a timeout. +func (lgr *Logr) Flush() error { + ctx, cancel := context.WithTimeout(context.Background(), lgr.options.flushTimeout) + defer cancel() + return lgr.FlushWithTimeout(ctx) +} + +// Flush blocks while flushing the logr queue and all target queues, by +// writing existing log records to valid targets. 
+// Any attempts to add new log records will block until flush is complete. +// Use `IsTimeoutError` to determine if the returned error is +// due to a timeout. +func (lgr *Logr) FlushWithTimeout(ctx context.Context) error { + if !lgr.HasTargets() { + return nil + } + + if lgr.IsShutdown() { + return errors.New("Flush called on shut down Logr") + } + + rec := newFlushLogRec(lgr.NewLogger()) + lgr.enqueue(rec) + + select { + case <-ctx.Done(): + return newTimeoutError("logr queue flush timeout") + case <-rec.flush: + } + return nil +} + +// IsShutdown returns true if this Logr instance has been shut down. +// No further log records can be enqueued and no targets added after +// shutdown. +func (lgr *Logr) IsShutdown() bool { + return atomic.LoadInt32(&lgr.shutdown) != 0 +} + +// Shutdown cleanly stops the logging engine after making best efforts +// to flush all targets. Call this function right before application +// exit - logr cannot be restarted once shut down. +// `logr.ShutdownTimeout` determines how long shutdown can execute before +// timing out. Use `IsTimeoutError` to determine if the returned error is +// due to a timeout. +func (lgr *Logr) Shutdown() error { + ctx, cancel := context.WithTimeout(context.Background(), lgr.options.shutdownTimeout) + defer cancel() + return lgr.ShutdownWithTimeout(ctx) +} + +// Shutdown cleanly stops the logging engine after making best efforts +// to flush all targets. Call this function right before application +// exit - logr cannot be restarted once shut down. +// Use `IsTimeoutError` to determine if the returned error is due to a +// timeout. 
+func (lgr *Logr) ShutdownWithTimeout(ctx context.Context) error { + if err := lgr.FlushWithTimeout(ctx); err != nil { + return err + } + + if atomic.SwapInt32(&lgr.shutdown, 1) != 0 { + return errors.New("Shutdown called again after shut down") + } + + lgr.ResetLevelCache() + lgr.stopMetricsUpdater() + + close(lgr.quit) + + errs := merror.New() + + // Wait for read loop to exit + select { + case <-ctx.Done(): + errs.Append(newTimeoutError("logr queue shutdown timeout")) + case <-lgr.done: + } + + // logr.in channel should now be drained to targets and no more log records + // can be added. + lgr.tmux.RLock() + defer lgr.tmux.RUnlock() + for _, host := range lgr.targetHosts { + err := host.Shutdown(ctx) + if err != nil { + errs.Append(err) + } + } + return errs.ErrorOrNil() +} + +// ReportError is used to notify the host application of any internal logging errors. +// If `OnLoggerError` is not nil, it is called with the error, otherwise the error is +// output to `os.Stderr`. +func (lgr *Logr) ReportError(err interface{}) { + lgr.incErrorCounter() + + if lgr.options.onLoggerError == nil { + fmt.Fprintln(os.Stderr, err) + return + } + lgr.options.onLoggerError(fmt.Errorf("%v", err)) +} + +// BorrowBuffer borrows a buffer from the pool. Release the buffer to reduce garbage collection. +func (lgr *Logr) BorrowBuffer() *bytes.Buffer { + if lgr.options.disableBufferPool { + return &bytes.Buffer{} + } + return lgr.bufferPool.Get().(*bytes.Buffer) +} + +// ReleaseBuffer returns a buffer to the pool to reduce garbage collection. The buffer is only +// retained if less than MaxPooledBuffer. +func (lgr *Logr) ReleaseBuffer(buf *bytes.Buffer) { + if !lgr.options.disableBufferPool && buf.Cap() < lgr.options.maxPooledBuffer { + buf.Reset() + lgr.bufferPool.Put(buf) + } +} + +// start selects on incoming log records until shutdown record is received. +// Incoming log records are fanned out to all log targets. 
+func (lgr *Logr) start() { + defer func() { + if r := recover(); r != nil { + lgr.ReportError(r) + go lgr.start() + } else { + close(lgr.done) + } + }() + + for { + var rec *LogRec + select { + case rec = <-lgr.in: + if rec.flush != nil { + lgr.flush(rec.flush) + } else { + rec.prep() + lgr.fanout(rec) + } + case <-lgr.quit: + return + } + } +} + +// fanout pushes a LogRec to all targets. +func (lgr *Logr) fanout(rec *LogRec) { + var host *TargetHost + defer func() { + if r := recover(); r != nil { + lgr.ReportError(fmt.Errorf("fanout failed for target %s, %v", host.String(), r)) + } + }() + + var logged bool + + lgr.tmux.RLock() + defer lgr.tmux.RUnlock() + for _, host = range lgr.targetHosts { + if enabled, _ := host.IsLevelEnabled(rec.Level()); enabled { + host.Log(rec) + logged = true + } + } + + if logged { + lgr.incLoggedCounter() + } +} + +// flush drains the queue and notifies when done. +func (lgr *Logr) flush(done chan<- struct{}) { + // first drain the logr queue. +loop: + for { + var rec *LogRec + select { + case rec = <-lgr.in: + if rec.flush == nil { + rec.prep() + lgr.fanout(rec) + } + default: + break loop + } + } + + logger := lgr.NewLogger() + + // drain all the targets; block until finished. + lgr.tmux.RLock() + defer lgr.tmux.RUnlock() + for _, host := range lgr.targetHosts { + rec := newFlushLogRec(logger) + host.Log(rec) + <-rec.flush + } + done <- struct{}{} +} diff --git a/vendor/github.com/mattermost/logr/v2/logrec.go b/vendor/github.com/mattermost/logr/v2/logrec.go new file mode 100644 index 00000000..76d51b9e --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/logrec.go @@ -0,0 +1,182 @@ +package logr + +import ( + "fmt" + "path/filepath" + "runtime" + "strings" + "sync" + "time" +) + +// LogRec collects raw, unformatted data to be logged. +// TODO: pool these? how to reliably know when targets are done with them? Copy for each target? 
+type LogRec struct { + mux sync.RWMutex + time time.Time + + level Level + logger Logger + + msg string + newline bool + fields []Field + + stackPC []uintptr + stackCount int + + // flushes Logr and target queues when not nil. + flush chan struct{} + + // remaining fields calculated by `prep` + frames []runtime.Frame + fieldsAll []Field + caller string +} + +// NewLogRec creates a new LogRec with the current time and optional stack trace. +func NewLogRec(lvl Level, logger Logger, msg string, fields []Field, incStacktrace bool) *LogRec { + rec := &LogRec{time: time.Now(), logger: logger, level: lvl, msg: msg, fields: fields} + if incStacktrace { + rec.stackPC = make([]uintptr, DefaultMaxStackFrames) + rec.stackCount = runtime.Callers(2, rec.stackPC) + } + return rec +} + +// newFlushLogRec creates a LogRec that flushes the Logr queue and +// any target queues that support flushing. +func newFlushLogRec(logger Logger) *LogRec { + return &LogRec{logger: logger, flush: make(chan struct{})} +} + +// prep resolves stack trace to frames. +func (rec *LogRec) prep() { + rec.mux.Lock() + defer rec.mux.Unlock() + + // include log rec fields and logger fields added via "With" + rec.fieldsAll = make([]Field, 0, len(rec.fields)+len(rec.logger.fields)) + rec.fieldsAll = append(rec.fieldsAll, rec.logger.fields...) + rec.fieldsAll = append(rec.fieldsAll, rec.fields...) + + filter := rec.logger.lgr.options.stackFilter + + // resolve stack trace + if rec.stackCount > 0 { + rec.frames = make([]runtime.Frame, 0, rec.stackCount) + frames := runtime.CallersFrames(rec.stackPC[:rec.stackCount]) + for { + frame, more := frames.Next() + + // remove all package entries that are in filter. 
+ pkg := ResolvePackageName(frame.Function) + if _, ok := filter[pkg]; !ok && pkg != "" { + rec.frames = append(rec.frames, frame) + } + + if !more { + break + } + } + } + + // calc caller if stack trace provided + if len(rec.frames) > 0 { + rec.caller = calcCaller(rec.frames) + } +} + +// WithTime returns a shallow copy of the log record while replacing +// the time. This can be used by targets and formatters to adjust +// the time, or take ownership of the log record. +func (rec *LogRec) WithTime(time time.Time) *LogRec { + rec.mux.RLock() + defer rec.mux.RUnlock() + + return &LogRec{ + time: time, + level: rec.level, + logger: rec.logger, + msg: rec.msg, + newline: rec.newline, + fields: rec.fields, + stackPC: rec.stackPC, + stackCount: rec.stackCount, + frames: rec.frames, + } +} + +// Logger returns the `Logger` that created this `LogRec`. +func (rec *LogRec) Logger() Logger { + return rec.logger +} + +// Time returns this log record's time stamp. +func (rec *LogRec) Time() time.Time { + // no locking needed as this field is not mutated. + return rec.time +} + +// Level returns this log record's Level. +func (rec *LogRec) Level() Level { + // no locking needed as this field is not mutated. + return rec.level +} + +// Fields returns this log record's Fields. +func (rec *LogRec) Fields() []Field { + // no locking needed as this field is not mutated. + return rec.fieldsAll +} + +// Msg returns this log record's message text. +func (rec *LogRec) Msg() string { + rec.mux.RLock() + defer rec.mux.RUnlock() + return rec.msg +} + +// StackFrames returns this log record's stack frames or +// nil if no stack trace was required. +func (rec *LogRec) StackFrames() []runtime.Frame { + rec.mux.RLock() + defer rec.mux.RUnlock() + return rec.frames +} + +// Caller returns this log record's caller info, meaning the file and line +// number where this log record was emitted. Returns empty string if no +// stack trace was provided. 
+func (rec *LogRec) Caller() string { + rec.mux.RLock() + defer rec.mux.RUnlock() + return rec.caller +} + +// String returns a string representation of this log record. +func (rec *LogRec) String() string { + if rec.flush != nil { + return "[flusher]" + } + + f := &DefaultFormatter{} + buf := rec.logger.lgr.BorrowBuffer() + defer rec.logger.lgr.ReleaseBuffer(buf) + buf, _ = f.Format(rec, rec.Level(), buf) + return strings.TrimSpace(buf.String()) +} + +func calcCaller(frames []runtime.Frame) string { + for _, frame := range frames { + if frame.File == "" { + continue + } + + dir, file := filepath.Split(frame.File) + base := filepath.Base(dir) + + return fmt.Sprintf("%s/%s:%d", base, file, frame.Line) + } + return "" +} diff --git a/vendor/github.com/mattermost/logr/v2/metrics.go b/vendor/github.com/mattermost/logr/v2/metrics.go new file mode 100644 index 00000000..f4f4d67f --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/metrics.go @@ -0,0 +1,140 @@ +package logr + +import "time" + +const ( + DefMetricsUpdateFreqMillis = 15000 // 15 seconds +) + +// Counter is a simple metrics sink that can only increment a value. +// Implementations are external to Logr and provided via `MetricsCollector`. +type Counter interface { + // Inc increments the counter by 1. Use Add to increment it by arbitrary non-negative values. + Inc() + // Add adds the given value to the counter. It panics if the value is < 0. + Add(float64) +} + +// Gauge is a simple metrics sink that can receive values and increase or decrease. +// Implementations are external to Logr and provided via `MetricsCollector`. +type Gauge interface { + // Set sets the Gauge to an arbitrary value. + Set(float64) + // Add adds the given value to the Gauge. (The value can be negative, resulting in a decrease of the Gauge.) + Add(float64) + // Sub subtracts the given value from the Gauge. (The value can be negative, resulting in an increase of the Gauge.) 
+ Sub(float64) +} + +// MetricsCollector provides a way for users of this Logr package to have metrics pushed +// in an efficient way to any backend, e.g. Prometheus. +// For each target added to Logr, the supplied MetricsCollector will provide a Gauge +// and Counters that will be called frequently as logging occurs. +type MetricsCollector interface { + // QueueSizeGauge returns a Gauge that will be updated by the named target. + QueueSizeGauge(target string) (Gauge, error) + // LoggedCounter returns a Counter that will be incremented by the named target. + LoggedCounter(target string) (Counter, error) + // ErrorCounter returns a Counter that will be incremented by the named target. + ErrorCounter(target string) (Counter, error) + // DroppedCounter returns a Counter that will be incremented by the named target. + DroppedCounter(target string) (Counter, error) + // BlockedCounter returns a Counter that will be incremented by the named target. + BlockedCounter(target string) (Counter, error) +} + +// TargetWithMetrics is a target that provides metrics. +type TargetWithMetrics interface { + EnableMetrics(collector MetricsCollector, updateFreqMillis int64) error +} + +type metrics struct { + collector MetricsCollector + updateFreqMillis int64 + queueSizeGauge Gauge + loggedCounter Counter + errorCounter Counter + done chan struct{} +} + +// initMetrics initializes metrics collection. 
+func (lgr *Logr) initMetrics(collector MetricsCollector, updatefreq int64) { + lgr.stopMetricsUpdater() + + if collector == nil { + lgr.metricsMux.Lock() + lgr.metrics = nil + lgr.metricsMux.Unlock() + return + } + + metrics := &metrics{ + collector: collector, + updateFreqMillis: updatefreq, + done: make(chan struct{}), + } + metrics.queueSizeGauge, _ = collector.QueueSizeGauge("_logr") + metrics.loggedCounter, _ = collector.LoggedCounter("_logr") + metrics.errorCounter, _ = collector.ErrorCounter("_logr") + + lgr.metricsMux.Lock() + lgr.metrics = metrics + lgr.metricsMux.Unlock() + + go lgr.startMetricsUpdater() +} + +func (lgr *Logr) setQueueSizeGauge(val float64) { + lgr.metricsMux.RLock() + defer lgr.metricsMux.RUnlock() + + if lgr.metrics != nil { + lgr.metrics.queueSizeGauge.Set(val) + } +} + +func (lgr *Logr) incLoggedCounter() { + lgr.metricsMux.RLock() + defer lgr.metricsMux.RUnlock() + + if lgr.metrics != nil { + lgr.metrics.loggedCounter.Inc() + } +} + +func (lgr *Logr) incErrorCounter() { + lgr.metricsMux.RLock() + defer lgr.metricsMux.RUnlock() + + if lgr.metrics != nil { + lgr.metrics.errorCounter.Inc() + } +} + +// startMetricsUpdater updates the metrics for any polled values every `metricsUpdateFreqSecs` seconds until +// logr is closed. 
+func (lgr *Logr) startMetricsUpdater() { + for { + lgr.metricsMux.RLock() + metrics := lgr.metrics + c := metrics.done + lgr.metricsMux.RUnlock() + + select { + case <-c: + return + case <-time.After(time.Duration(metrics.updateFreqMillis) * time.Millisecond): + lgr.setQueueSizeGauge(float64(len(lgr.in))) + } + } +} + +func (lgr *Logr) stopMetricsUpdater() { + lgr.metricsMux.Lock() + defer lgr.metricsMux.Unlock() + + if lgr.metrics != nil && lgr.metrics.done != nil { + close(lgr.metrics.done) + lgr.metrics.done = nil + } +} diff --git a/vendor/github.com/mattermost/logr/v2/options.go b/vendor/github.com/mattermost/logr/v2/options.go new file mode 100644 index 00000000..981cdef6 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/options.go @@ -0,0 +1,206 @@ +package logr + +import ( + "errors" + "time" +) + +type Option func(*Logr) error + +type options struct { + maxQueueSize int + onLoggerError func(error) + onQueueFull func(rec *LogRec, maxQueueSize int) bool + onTargetQueueFull func(target Target, rec *LogRec, maxQueueSize int) bool + onExit func(code int) + onPanic func(err interface{}) + enqueueTimeout time.Duration + shutdownTimeout time.Duration + flushTimeout time.Duration + useSyncMapLevelCache bool + maxPooledBuffer int + disableBufferPool bool + metricsCollector MetricsCollector + metricsUpdateFreqMillis int64 + stackFilter map[string]struct{} + maxFieldLen int +} + +// MaxQueueSize is the maximum number of log records that can be queued. +// If exceeded, `OnQueueFull` is called which determines if the log +// record will be dropped or block until add is successful. +// Defaults to DefaultMaxQueueSize. +func MaxQueueSize(size int) Option { + return func(l *Logr) error { + if size < 0 { + return errors.New("size cannot be less than zero") + } + l.options.maxQueueSize = size + return nil + } +} + +// OnLoggerError, when not nil, is called any time an internal +// logging error occurs. 
For example, this can happen when a +// target cannot connect to its data sink. +func OnLoggerError(f func(error)) Option { + return func(l *Logr) error { + l.options.onLoggerError = f + return nil + } +} + +// OnQueueFull, when not nil, is called on an attempt to add +// a log record to a full Logr queue. +// `MaxQueueSize` can be used to modify the maximum queue size. +// This function should return quickly, with a bool indicating whether +// the log record should be dropped (true) or block until the log record +// is successfully added (false). If nil then blocking (false) is assumed. +func OnQueueFull(f func(rec *LogRec, maxQueueSize int) bool) Option { + return func(l *Logr) error { + l.options.onQueueFull = f + return nil + } +} + +// OnTargetQueueFull, when not nil, is called on an attempt to add +// a log record to a full target queue provided the target supports reporting +// this condition. +// This function should return quickly, with a bool indicating whether +// the log record should be dropped (true) or block until the log record +// is successfully added (false). If nil then blocking (false) is assumed. +func OnTargetQueueFull(f func(target Target, rec *LogRec, maxQueueSize int) bool) Option { + return func(l *Logr) error { + l.options.onTargetQueueFull = f + return nil + } +} + +// OnExit, when not nil, is called when a FatalXXX style log API is called. +// When nil, the default behavior is to cleanly shut down this Logr and +// call `os.Exit(code)`. +func OnExit(f func(code int)) Option { + return func(l *Logr) error { + l.options.onExit = f + return nil + } +} + +// OnPanic, when not nil, is called when a PanicXXX style log API is called. +// When nil, the default behavior is to cleanly shut down this Logr and +// call `panic(err)`. +func OnPanic(f func(err interface{})) Option { + return func(l *Logr) error { + l.options.onPanic = f + return nil + } +} + +// EnqueueTimeout is the amount of time a log record can take to be queued. 
+// This only applies to blocking enqueue which happen after `logr.OnQueueFull` +// is called and returns false. +func EnqueueTimeout(dur time.Duration) Option { + return func(l *Logr) error { + l.options.enqueueTimeout = dur + return nil + } +} + +// ShutdownTimeout is the amount of time `logr.Shutdown` can execute before +// timing out. An alternative is to use `logr.ShutdownWithContext` and supply +// a timeout. +func ShutdownTimeout(dur time.Duration) Option { + return func(l *Logr) error { + l.options.shutdownTimeout = dur + return nil + } +} + +// FlushTimeout is the amount of time `logr.Flush` can execute before +// timing out. An alternative is to use `logr.FlushWithContext` and supply +// a timeout. +func FlushTimeout(dur time.Duration) Option { + return func(l *Logr) error { + l.options.flushTimeout = dur + return nil + } +} + +// UseSyncMapLevelCache can be set to true when high concurrency (e.g. >32 cores) +// is expected. This may improve performance with large numbers of cores - benchmark +// for your use case. +func UseSyncMapLevelCache(use bool) Option { + return func(l *Logr) error { + l.options.useSyncMapLevelCache = use + return nil + } +} + +// MaxPooledBufferSize determines the maximum size of a buffer that can be +// pooled. To reduce allocations, the buffers needed during formatting (etc) +// are pooled. A very large log item will grow a buffer that could stay in +// memory indefinitely. This setting lets you control how big a pooled buffer +// can be - anything larger will be garbage collected after use. +// Defaults to 1MB. +func MaxPooledBufferSize(size int) Option { + return func(l *Logr) error { + l.options.maxPooledBuffer = size + return nil + } +} + +// DisableBufferPool when true disables the buffer pool. See MaxPooledBuffer. 
+func DisableBufferPool(disable bool) Option { + return func(l *Logr) error { + l.options.disableBufferPool = disable + return nil + } +} + +// SetMetricsCollector enables metrics collection by supplying a MetricsCollector. +// The MetricsCollector provides counters and gauges that are updated by log targets. +// `updateFreqMillis` determines how often polled metrics are updated. Defaults to 15000 (15 seconds) +// and must be at least 250 so we don't peg the CPU. +func SetMetricsCollector(collector MetricsCollector, updateFreqMillis int64) Option { + return func(l *Logr) error { + if collector == nil { + return errors.New("collector cannot be nil") + } + if updateFreqMillis < 250 { + return errors.New("updateFreqMillis cannot be less than 250") + } + l.options.metricsCollector = collector + l.options.metricsUpdateFreqMillis = updateFreqMillis + return nil + } +} + +// StackFilter provides a list of package names to exclude from the top of +// stack traces. The Logr packages are automatically filtered. +func StackFilter(pkg ...string) Option { + return func(l *Logr) error { + if l.options.stackFilter == nil { + l.options.stackFilter = make(map[string]struct{}) + } + + for _, p := range pkg { + if p != "" { + l.options.stackFilter[p] = struct{}{} + } + } + return nil + } +} + +// MaxFieldLen is the maximum number of characters for a field. +// If exceeded, remaining bytes will be discarded. +// Defaults to DefaultMaxFieldLength. 
+func MaxFieldLen(size int) Option { + return func(l *Logr) error { + if size < 0 { + return errors.New("size cannot be less than zero") + } + l.options.maxFieldLen = size + return nil + } +} diff --git a/vendor/github.com/mattermost/logr/v2/pkg.go b/vendor/github.com/mattermost/logr/v2/pkg.go new file mode 100644 index 00000000..873b2e95 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/pkg.go @@ -0,0 +1,57 @@ +package logr + +import ( + "runtime" + "strings" + "sync" +) + +const ( + maximumStackDepth int = 30 +) + +var ( + logrPkg string + pkgCalcOnce sync.Once +) + +// GetPackageName returns the root package name of Logr. +func GetLogrPackageName() string { + pkgCalcOnce.Do(func() { + logrPkg = GetPackageName("GetLogrPackageName") + }) + return logrPkg +} + +// GetPackageName returns the package name of the caller. +// `callingFuncName` should be the name of the calling function and +// should be unique enough not to collide with any runtime methods. +func GetPackageName(callingFuncName string) string { + var pkgName string + + pcs := make([]uintptr, maximumStackDepth) + _ = runtime.Callers(0, pcs) + + for _, pc := range pcs { + funcName := runtime.FuncForPC(pc).Name() + if strings.Contains(funcName, callingFuncName) { + pkgName = ResolvePackageName(funcName) + break + } + } + return pkgName +} + +// ResolvePackageName reduces a fully qualified function name to the package name +func ResolvePackageName(f string) string { + for { + lastPeriod := strings.LastIndex(f, ".") + lastSlash := strings.LastIndex(f, "/") + if lastPeriod > lastSlash { + f = f[:lastPeriod] + } else { + break + } + } + return f +} diff --git a/vendor/github.com/mattermost/logr/v2/stdlogger.go b/vendor/github.com/mattermost/logr/v2/stdlogger.go new file mode 100644 index 00000000..50171b3d --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/stdlogger.go @@ -0,0 +1,56 @@ +package logr + +import ( + "log" + "os" + "strings" +) + +// NewStdLogger creates a standard logger backed by a 
Logr instance. +// All log records are emitted with the specified log level. +func NewStdLogger(level Level, logger Logger) *log.Logger { + adapter := newStdLogAdapter(logger, level) + return log.New(adapter, "", 0) +} + +// RedirectStdLog redirects output from the standard library's package-global logger +// to this logger at the specified level and with zero or more Field's. Since Logr already +// handles caller annotations, timestamps, etc., it automatically disables the standard +// library's annotations and prefixing. +// A function is returned that restores the original prefix and flags and resets the standard +// library's output to os.Stderr. +func (lgr *Logr) RedirectStdLog(level Level, fields ...Field) func() { + flags := log.Flags() + prefix := log.Prefix() + log.SetFlags(0) + log.SetPrefix("") + + logger := lgr.NewLogger().With(fields...) + adapter := newStdLogAdapter(logger, level) + log.SetOutput(adapter) + + return func() { + log.SetFlags(flags) + log.SetPrefix(prefix) + log.SetOutput(os.Stderr) + } +} + +type stdLogAdapter struct { + logger Logger + level Level +} + +func newStdLogAdapter(logger Logger, level Level) *stdLogAdapter { + return &stdLogAdapter{ + logger: logger, + level: level, + } +} + +// Write implements io.Writer +func (a *stdLogAdapter) Write(p []byte) (int, error) { + s := strings.TrimSpace(string(p)) + a.logger.Log(a.level, s) + return len(p), nil +} diff --git a/vendor/github.com/mattermost/logr/v2/sugar.go b/vendor/github.com/mattermost/logr/v2/sugar.go new file mode 100644 index 00000000..882f0fd5 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/sugar.go @@ -0,0 +1,197 @@ +package logr + +import ( + "fmt" +) + +// Sugar provides a less structured API for logging. 
+type Sugar struct { + logger Logger +} + +func (s Sugar) sugarLog(lvl Level, msg string, args ...interface{}) { + if s.logger.IsLevelEnabled(lvl) { + fields := make([]Field, 0, len(args)) + for _, arg := range args { + fields = append(fields, Any("", arg)) + } + s.logger.Log(lvl, msg, fields...) + } +} + +// Trace is a convenience method equivalent to `Log(TraceLevel, msg, args...)`. +func (s Sugar) Trace(msg string, args ...interface{}) { + s.sugarLog(Trace, msg, args...) +} + +// Debug is a convenience method equivalent to `Log(DebugLevel, msg, args...)`. +func (s Sugar) Debug(msg string, args ...interface{}) { + s.sugarLog(Debug, msg, args...) +} + +// Print ensures compatibility with std lib logger. +func (s Sugar) Print(msg string, args ...interface{}) { + s.Info(msg, args...) +} + +// Info is a convenience method equivalent to `Log(InfoLevel, msg, args...)`. +func (s Sugar) Info(msg string, args ...interface{}) { + s.sugarLog(Info, msg, args...) +} + +// Warn is a convenience method equivalent to `Log(WarnLevel, msg, args...)`. +func (s Sugar) Warn(msg string, args ...interface{}) { + s.sugarLog(Warn, msg, args...) +} + +// Error is a convenience method equivalent to `Log(ErrorLevel, msg, args...)`. +func (s Sugar) Error(msg string, args ...interface{}) { + s.sugarLog(Error, msg, args...) +} + +// Fatal is a convenience method equivalent to `Log(FatalLevel, msg, args...)` +func (s Sugar) Fatal(msg string, args ...interface{}) { + s.sugarLog(Fatal, msg, args...) +} + +// Panic is a convenience method equivalent to `Log(PanicLevel, msg, args...)` +func (s Sugar) Panic(msg string, args ...interface{}) { + s.sugarLog(Panic, msg, args...) +} + +// +// Printf style +// + +// Logf checks that the level matches one or more targets, and +// if so, generates a log record that is added to the main +// queue (channel). Arguments are handled in the manner of fmt.Printf. 
+func (s Sugar) Logf(lvl Level, format string, args ...interface{}) { + if s.logger.IsLevelEnabled(lvl) { + var msg string + if format == "" { + msg = fmt.Sprint(args...) + } else { + msg = fmt.Sprintf(format, args...) + } + s.logger.Log(lvl, msg) + } +} + +// Tracef is a convenience method equivalent to `Logf(TraceLevel, args...)`. +func (s Sugar) Tracef(format string, args ...interface{}) { + s.Logf(Trace, format, args...) +} + +// Debugf is a convenience method equivalent to `Logf(DebugLevel, args...)`. +func (s Sugar) Debugf(format string, args ...interface{}) { + s.Logf(Debug, format, args...) +} + +// Infof is a convenience method equivalent to `Logf(InfoLevel, args...)`. +func (s Sugar) Infof(format string, args ...interface{}) { + s.Logf(Info, format, args...) +} + +// Printf ensures compatibility with std lib logger. +func (s Sugar) Printf(format string, args ...interface{}) { + s.Infof(format, args...) +} + +// Warnf is a convenience method equivalent to `Logf(WarnLevel, args...)`. +func (s Sugar) Warnf(format string, args ...interface{}) { + s.Logf(Warn, format, args...) +} + +// Errorf is a convenience method equivalent to `Logf(ErrorLevel, args...)`. +func (s Sugar) Errorf(format string, args ...interface{}) { + s.Logf(Error, format, args...) +} + +// Fatalf is a convenience method equivalent to `Logf(FatalLevel, args...)` +func (s Sugar) Fatalf(format string, args ...interface{}) { + s.Logf(Fatal, format, args...) +} + +// Panicf is a convenience method equivalent to `Logf(PanicLevel, args...)` +func (s Sugar) Panicf(format string, args ...interface{}) { + s.Logf(Panic, format, args...) +} + +// +// K/V style +// + +// With returns a new Sugar logger with the specified key/value pairs added to the +// fields list. +func (s Sugar) With(keyValuePairs ...interface{}) Sugar { + return s.logger.With(s.argsToFields(keyValuePairs)...).Sugar() +} + +// Tracew outputs at trace level with the specified key/value pairs converted to fields. 
+func (s Sugar) Tracew(msg string, keyValuePairs ...interface{}) { + s.logger.Log(Trace, msg, s.argsToFields(keyValuePairs)...) +} + +// Debugw outputs at debug level with the specified key/value pairs converted to fields. +func (s Sugar) Debugw(msg string, keyValuePairs ...interface{}) { + s.logger.Log(Debug, msg, s.argsToFields(keyValuePairs)...) +} + +// Infow outputs at info level with the specified key/value pairs converted to fields. +func (s Sugar) Infow(msg string, keyValuePairs ...interface{}) { + s.logger.Log(Info, msg, s.argsToFields(keyValuePairs)...) +} + +// Warnw outputs at warn level with the specified key/value pairs converted to fields. +func (s Sugar) Warnw(msg string, keyValuePairs ...interface{}) { + s.logger.Log(Warn, msg, s.argsToFields(keyValuePairs)...) +} + +// Errorw outputs at error level with the specified key/value pairs converted to fields. +func (s Sugar) Errorw(msg string, keyValuePairs ...interface{}) { + s.logger.Log(Error, msg, s.argsToFields(keyValuePairs)...) +} + +// Fatalw outputs at fatal level with the specified key/value pairs converted to fields. +func (s Sugar) Fatalw(msg string, keyValuePairs ...interface{}) { + s.logger.Log(Fatal, msg, s.argsToFields(keyValuePairs)...) +} + +// Panicw outputs at panic level with the specified key/value pairs converted to fields. +func (s Sugar) Panicw(msg string, keyValuePairs ...interface{}) { + s.logger.Log(Panic, msg, s.argsToFields(keyValuePairs)...) +} + +// argsToFields converts an array of args, possibly containing name/value pairs +// into a []Field. 
+func (s Sugar) argsToFields(keyValuePairs []interface{}) []Field { + if len(keyValuePairs) == 0 { + return nil + } + + fields := make([]Field, 0, len(keyValuePairs)) + count := len(keyValuePairs) + + for i := 0; i < count; { + if fld, ok := keyValuePairs[i].(Field); ok { + fields = append(fields, fld) + i++ + continue + } + + if i == count-1 { + s.logger.Error("invalid key/value pair", Any("arg", keyValuePairs[i])) + break + } + + // we should have a key/value pair now. The key must be a string. + if key, ok := keyValuePairs[i].(string); !ok { + s.logger.Error("invalid key for key/value pair", Int("pos", i)) + } else { + fields = append(fields, Any(key, keyValuePairs[i+1])) + } + i += 2 + } + return fields +} diff --git a/vendor/github.com/mattermost/logr/v2/target.go b/vendor/github.com/mattermost/logr/v2/target.go new file mode 100644 index 00000000..fa0a9320 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/target.go @@ -0,0 +1,304 @@ +package logr + +import ( + "context" + "errors" + "fmt" + "os" + "sync/atomic" + "time" +) + +// Target represents a destination for log records such as file, +// database, TCP socket, etc. +type Target interface { + // Init is called once to initialize the target. + Init() error + + // Write outputs to this target's destination. + Write(p []byte, rec *LogRec) (int, error) + + // Shutdown is called once to free/close any resources. + // Target queue is already drained when this is called. + Shutdown() error +} + +type targetMetrics struct { + queueSizeGauge Gauge + loggedCounter Counter + errorCounter Counter + droppedCounter Counter + blockedCounter Counter +} + +type targetHostOptions struct { + name string + filter Filter + formatter Formatter + maxQueueSize int + metrics *metrics +} + +// TargetHost hosts and manages the lifecycle of a target. +// Incoming log records are queued and formatted before +// being passed to the target. 
+type TargetHost struct { + target Target + name string + + filter Filter + formatter Formatter + + in chan *LogRec + quit chan struct{} // closed by Shutdown to exit read loop + done chan struct{} // closed when read loop exited + targetMetrics *targetMetrics + + shutdown int32 +} + +func newTargetHost(target Target, options targetHostOptions) (*TargetHost, error) { + host := &TargetHost{ + target: target, + name: options.name, + filter: options.filter, + formatter: options.formatter, + in: make(chan *LogRec, options.maxQueueSize), + quit: make(chan struct{}), + done: make(chan struct{}), + } + + if host.name == "" { + host.name = fmt.Sprintf("%T", target) + } + + if host.filter == nil { + host.filter = &StdFilter{Lvl: Fatal} + } + if host.formatter == nil { + host.formatter = &DefaultFormatter{} + } + + err := host.initMetrics(options.metrics) + if err != nil { + return nil, err + } + + err = target.Init() + if err != nil { + return nil, err + } + + go host.start() + + return host, nil +} + +func (h *TargetHost) initMetrics(metrics *metrics) error { + if metrics == nil { + return nil + } + + var err error + tmetrics := &targetMetrics{} + + if tmetrics.queueSizeGauge, err = metrics.collector.QueueSizeGauge(h.name); err != nil { + return err + } + if tmetrics.loggedCounter, err = metrics.collector.LoggedCounter(h.name); err != nil { + return err + } + if tmetrics.errorCounter, err = metrics.collector.ErrorCounter(h.name); err != nil { + return err + } + if tmetrics.droppedCounter, err = metrics.collector.DroppedCounter(h.name); err != nil { + return err + } + if tmetrics.blockedCounter, err = metrics.collector.BlockedCounter(h.name); err != nil { + return err + } + h.targetMetrics = tmetrics + + updateFreqMillis := metrics.updateFreqMillis + if updateFreqMillis == 0 { + updateFreqMillis = DefMetricsUpdateFreqMillis + } + if updateFreqMillis < 250 { + updateFreqMillis = 250 // don't peg the CPU + } + + go h.startMetricsUpdater(updateFreqMillis) + return nil +} + +// 
IsLevelEnabled returns true if this target should emit logs for the specified level. +func (h *TargetHost) IsLevelEnabled(lvl Level) (enabled bool, level Level) { + level, enabled = h.filter.GetEnabledLevel(lvl) + return enabled, level +} + +// Shutdown stops processing log records after making best +// effort to flush queue. +func (h *TargetHost) Shutdown(ctx context.Context) error { + if atomic.SwapInt32(&h.shutdown, 1) != 0 { + return errors.New("targetHost shutdown called more than once") + } + + close(h.quit) + + // No more records can be accepted; now wait for read loop to exit. + select { + case <-ctx.Done(): + case <-h.done: + } + + // b.in channel should now be drained. + return h.target.Shutdown() +} + +// Log queues a log record to be output to this target's destination. +func (h *TargetHost) Log(rec *LogRec) { + if atomic.LoadInt32(&h.shutdown) != 0 { + return + } + + lgr := rec.Logger().Logr() + select { + case h.in <- rec: + default: + handler := lgr.options.onTargetQueueFull + if handler != nil && handler(h.target, rec, cap(h.in)) { + h.incDroppedCounter() + return // drop the record + } + h.incBlockedCounter() + + select { + case <-time.After(lgr.options.enqueueTimeout): + lgr.ReportError(fmt.Errorf("target enqueue timeout for log rec [%v]", rec)) + case h.in <- rec: // block until success or timeout + } + } +} + +func (h *TargetHost) setQueueSizeGauge(val float64) { + if h.targetMetrics != nil { + h.targetMetrics.queueSizeGauge.Set(val) + } +} + +func (h *TargetHost) incLoggedCounter() { + if h.targetMetrics != nil { + h.targetMetrics.loggedCounter.Inc() + } +} + +func (h *TargetHost) incErrorCounter() { + if h.targetMetrics != nil { + h.targetMetrics.errorCounter.Inc() + } +} + +func (h *TargetHost) incDroppedCounter() { + if h.targetMetrics != nil { + h.targetMetrics.droppedCounter.Inc() + } +} + +func (h *TargetHost) incBlockedCounter() { + if h.targetMetrics != nil { + h.targetMetrics.blockedCounter.Inc() + } +} + +// String returns a name for 
this target. +func (h *TargetHost) String() string { + return h.name +} + +// start accepts log records via In channel and writes to the +// supplied target, until Done channel signaled. +func (h *TargetHost) start() { + defer func() { + if r := recover(); r != nil { + fmt.Fprintln(os.Stderr, "TargetHost.start -- ", r) + go h.start() + } else { + close(h.done) + } + }() + + for { + var rec *LogRec + select { + case rec = <-h.in: + if rec.flush != nil { + h.flush(rec.flush) + } else { + err := h.writeRec(rec) + if err != nil { + h.incErrorCounter() + rec.Logger().Logr().ReportError(err) + } else { + h.incLoggedCounter() + } + } + case <-h.quit: + return + } + } +} + +func (h *TargetHost) writeRec(rec *LogRec) error { + level, enabled := h.filter.GetEnabledLevel(rec.Level()) + if !enabled { + // how did we get here? + return fmt.Errorf("level %s not enabled for target %s", rec.Level().Name, h.name) + } + + buf := rec.logger.lgr.BorrowBuffer() + defer rec.logger.lgr.ReleaseBuffer(buf) + + buf, err := h.formatter.Format(rec, level, buf) + if err != nil { + return err + } + + _, err = h.target.Write(buf.Bytes(), rec) + return err +} + +// startMetricsUpdater updates the metrics for any polled values every `updateFreqMillis` seconds until +// target is shut down. +func (h *TargetHost) startMetricsUpdater(updateFreqMillis int64) { + for { + select { + case <-h.done: + return + case <-time.After(time.Duration(updateFreqMillis) * time.Millisecond): + h.setQueueSizeGauge(float64(len(h.in))) + } + } +} + +// flush drains the queue and notifies when done. +func (h *TargetHost) flush(done chan<- struct{}) { + for { + var rec *LogRec + var err error + select { + case rec = <-h.in: + // ignore any redundant flush records. 
+ if rec.flush == nil { + err = h.writeRec(rec) + if err != nil { + h.incErrorCounter() + rec.Logger().Logr().ReportError(err) + } + } + default: + done <- struct{}{} + return + } + } +} diff --git a/vendor/github.com/mattermost/logr/v2/targets/file.go b/vendor/github.com/mattermost/logr/v2/targets/file.go new file mode 100644 index 00000000..71133fac --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/file.go @@ -0,0 +1,78 @@ +package targets + +import ( + "errors" + "io" + + "github.com/mattermost/logr/v2" + "gopkg.in/natefinch/lumberjack.v2" +) + +type FileOptions struct { + // Filename is the file to write logs to. Backup log files will be retained + // in the same directory. It uses -lumberjack.log in + // os.TempDir() if empty. + Filename string `json:"filename"` + + // MaxSize is the maximum size in megabytes of the log file before it gets + // rotated. It defaults to 100 megabytes. + MaxSize int `json:"max_size"` + + // MaxAge is the maximum number of days to retain old log files based on the + // timestamp encoded in their filename. Note that a day is defined as 24 + // hours and may not exactly correspond to calendar days due to daylight + // savings, leap seconds, etc. The default is not to remove old log files + // based on age. + MaxAge int `json:"max_age"` + + // MaxBackups is the maximum number of old log files to retain. The default + // is to retain all old log files (though MaxAge may still cause them to get + // deleted.) + MaxBackups int `json:"max_backups"` + + // Compress determines if the rotated log files should be compressed + // using gzip. The default is not to perform compression. + Compress bool `json:"compress"` +} + +func (fo FileOptions) CheckValid() error { + if fo.Filename == "" { + return errors.New("filename cannot be empty") + } + return nil +} + +// File outputs log records to a file which can be log rotated based on size or age. +// Uses `https://github.com/natefinch/lumberjack` for rotation. 
+type File struct { + out io.WriteCloser +} + +// NewFileTarget creates a target capable of outputting log records to a rotated file. +func NewFileTarget(opts FileOptions) *File { + lumber := &lumberjack.Logger{ + Filename: opts.Filename, + MaxSize: opts.MaxSize, + MaxBackups: opts.MaxBackups, + MaxAge: opts.MaxAge, + Compress: opts.Compress, + } + f := &File{out: lumber} + return f +} + +// Init is called once to initialize the target. +func (f *File) Init() error { + return nil +} + +// Write outputs bytes to this file target. +func (f *File) Write(p []byte, rec *logr.LogRec) (int, error) { + return f.out.Write(p) +} + +// Shutdown is called once to free/close any resources. +// Target queue is already drained when this is called. +func (f *File) Shutdown() error { + return f.out.Close() +} diff --git a/vendor/github.com/mattermost/logr/v2/targets/syslog.go b/vendor/github.com/mattermost/logr/v2/targets/syslog.go new file mode 100644 index 00000000..eaeacb3a --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/syslog.go @@ -0,0 +1,119 @@ +//go:build !windows && !nacl && !plan9 +// +build !windows,!nacl,!plan9 + +package targets + +import ( + "crypto/tls" + "errors" + "fmt" + + "github.com/mattermost/logr/v2" + syslog "github.com/wiggin77/srslog" +) + +// Syslog outputs log records to local or remote syslog. +type Syslog struct { + params *SyslogOptions + writer *syslog.Writer +} + +// SyslogOptions provides parameters for dialing a syslog daemon. 
+type SyslogOptions struct { + IP string `json:"ip,omitempty"` // deprecated (use Host instead) + Host string `json:"host"` + Port int `json:"port"` + TLS bool `json:"tls"` + Cert string `json:"cert"` + Insecure bool `json:"insecure"` + Tag string `json:"tag"` +} + +func (so SyslogOptions) CheckValid() error { + if so.Host == "" && so.IP == "" { + return errors.New("missing host") + } + if so.Port == 0 { + return errors.New("missing port") + } + return nil +} + +// NewSyslogTarget creates a target capable of outputting log records to remote or local syslog, with or without TLS. +func NewSyslogTarget(params *SyslogOptions) (*Syslog, error) { + if params == nil { + return nil, errors.New("params cannot be nil") + } + + s := &Syslog{ + params: params, + } + return s, nil +} + +// Init is called once to initialize the target. +func (s *Syslog) Init() error { + network := "tcp" + var config *tls.Config + + host := s.params.Host + if host == "" { + host = s.params.IP + } + + if s.params.TLS { + network = "tcp+tls" + config = &tls.Config{InsecureSkipVerify: s.params.Insecure} + + pool, err := GetCertPoolOrNil(s.params.Cert) + if err != nil { + return err + } + if pool != nil { + config.RootCAs = pool + } + } + raddr := fmt.Sprintf("%s:%d", host, s.params.Port) + if raddr == ":0" { + // If no IP:port provided then connect to local syslog. + raddr = "" + network = "" + } + + var err error + s.writer, err = syslog.DialWithTLSConfig(network, raddr, syslog.LOG_INFO, s.params.Tag, config) + return err +} + +// Write outputs bytes to this file target. +func (s *Syslog) Write(p []byte, rec *logr.LogRec) (int, error) { + txt := string(p) + n := len(txt) + var err error + + switch rec.Level() { + case logr.Panic, logr.Fatal: + err = s.writer.Crit(txt) + case logr.Error: + err = s.writer.Err(txt) + case logr.Warn: + err = s.writer.Warning(txt) + case logr.Debug, logr.Trace: + err = s.writer.Debug(txt) + default: + // logr.Info plus all custom levels. 
+ err = s.writer.Info(txt) + } + + if err != nil { + n = 0 + // syslog writer will try to reconnect. + } + return n, err +} + +// Shutdown is called once to free/close any resources. +// Target queue is already drained when this is called. +func (s *Syslog) Shutdown() error { + return s.writer.Close() +} diff --git a/vendor/github.com/mattermost/logr/v2/targets/syslog_unsupported.go b/vendor/github.com/mattermost/logr/v2/targets/syslog_unsupported.go new file mode 100644 index 00000000..e4086e96 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/syslog_unsupported.go @@ -0,0 +1,56 @@ +// +build windows nacl plan9 + +package targets + +import ( + "errors" + + "github.com/mattermost/logr/v2" + syslog "github.com/wiggin77/srslog" +) + +const ( + unsupported = "Syslog target is not supported on this platform." +) + +// Syslog outputs log records to local or remote syslog. +type Syslog struct { + params *SyslogOptions + writer *syslog.Writer +} + +// SyslogOptions provides parameters for dialing a syslog daemon. +type SyslogOptions struct { + IP string `json:"ip,omitempty"` // deprecated + Host string `json:"host"` + Port int `json:"port"` + TLS bool `json:"tls"` + Cert string `json:"cert"` + Insecure bool `json:"insecure"` + Tag string `json:"tag"` +} + +func (so SyslogOptions) CheckValid() error { + return errors.New(unsupported) +} + +// NewSyslogTarget creates a target capable of outputting log records to remote or local syslog, with or without TLS. +func NewSyslogTarget(params *SyslogOptions) (*Syslog, error) { + return nil, errors.New(unsupported) +} + +// Init is called once to initialize the target. +func (s *Syslog) Init() error { + return errors.New(unsupported) +} + +// Write outputs bytes to this file target. +func (s *Syslog) Write(p []byte, rec *logr.LogRec) (int, error) { + return 0, errors.New(unsupported) +} + +// Shutdown is called once to free/close any resources. +// Target queue is already drained when this is called. 
+func (s *Syslog) Shutdown() error { + return errors.New(unsupported) +} diff --git a/vendor/github.com/mattermost/logr/v2/targets/tcp.go b/vendor/github.com/mattermost/logr/v2/targets/tcp.go new file mode 100644 index 00000000..27ff6de8 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/tcp.go @@ -0,0 +1,253 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package targets + +import ( + "context" + "crypto/tls" + "errors" + "fmt" + "net" + "sync" + "time" + + "github.com/mattermost/logr/v2" +) + +const ( + DialTimeoutSecs = 30 + WriteTimeoutSecs = 30 + RetryBackoffMillis int64 = 100 + MaxRetryBackoffMillis int64 = 30 * 1000 // 30 seconds +) + +// Tcp outputs log records to raw socket server. +type Tcp struct { + options *TcpOptions + addy string + + mutex sync.Mutex + conn net.Conn + monitor chan struct{} + shutdown chan struct{} +} + +// TcpOptions provides parameters for dialing a socket server. +type TcpOptions struct { + IP string `json:"ip,omitempty"` // deprecated + Host string `json:"host"` + Port int `json:"port"` + TLS bool `json:"tls"` + Cert string `json:"cert"` + Insecure bool `json:"insecure"` +} + +func (to TcpOptions) CheckValid() error { + if to.Host == "" && to.IP == "" { + return errors.New("missing host") + } + if to.Port == 0 { + return errors.New("missing port") + } + return nil +} + +// NewTcpTarget creates a target capable of outputting log records to a raw socket, with or without TLS. +func NewTcpTarget(options *TcpOptions) *Tcp { + tcp := &Tcp{ + options: options, + addy: fmt.Sprintf("%s:%d", options.IP, options.Port), + monitor: make(chan struct{}), + shutdown: make(chan struct{}), + } + return tcp +} + +// Init is called once to initialize the target. +func (tcp *Tcp) Init() error { + return nil +} + +// getConn provides a net.Conn. 
If a connection already exists, it is returned immediately, +// otherwise this method blocks until a new connection is created, timeout or shutdown. +func (tcp *Tcp) getConn(reporter func(err interface{})) (net.Conn, error) { + tcp.mutex.Lock() + defer tcp.mutex.Unlock() + + if tcp.conn != nil { + return tcp.conn, nil + } + + type result struct { + conn net.Conn + err error + } + + connChan := make(chan result) + ctx, cancel := context.WithTimeout(context.Background(), time.Second*DialTimeoutSecs) + defer cancel() + + go func(ctx context.Context, ch chan result) { + conn, err := tcp.dial(ctx) + if err != nil { + reporter(fmt.Errorf("log target %s connection error: %w", tcp.String(), err)) + ch <- result{conn: nil, err: err} + return + } + tcp.conn = conn + tcp.monitor = make(chan struct{}) + go monitor(tcp.conn, tcp.monitor) + ch <- result{conn: conn, err: err} + }(ctx, connChan) + + select { + case <-tcp.shutdown: + return nil, errors.New("shutdown") + case res := <-connChan: + return res.conn, res.err + } +} + +// dial connects to a TCP socket, and optionally performs a TLS handshake. +// A non-nil context must be provided which can cancel the dial. 
+func (tcp *Tcp) dial(ctx context.Context) (net.Conn, error) { + var dialer net.Dialer + dialer.Timeout = time.Second * DialTimeoutSecs + conn, err := dialer.DialContext(ctx, "tcp", fmt.Sprintf("%s:%d", tcp.options.IP, tcp.options.Port)) + if err != nil { + return nil, err + } + + if !tcp.options.TLS { + return conn, nil + } + + tlsconfig := &tls.Config{ + ServerName: tcp.options.IP, + InsecureSkipVerify: tcp.options.Insecure, + } + + pool, err := GetCertPoolOrNil(tcp.options.Cert) + if err != nil { + return nil, err + } + if pool != nil { + tlsconfig.RootCAs = pool + } + + tlsConn := tls.Client(conn, tlsconfig) + if err := tlsConn.Handshake(); err != nil { + return nil, err + } + return tlsConn, nil +} + +func (tcp *Tcp) close() error { + tcp.mutex.Lock() + defer tcp.mutex.Unlock() + + var err error + if tcp.conn != nil { + close(tcp.monitor) + err = tcp.conn.Close() + tcp.conn = nil + } + return err +} + +// Shutdown stops processing log records after making best effort to flush queue. +func (tcp *Tcp) Shutdown() error { + err := tcp.close() + close(tcp.shutdown) + return err +} + +// Write converts the log record to bytes, via the Formatter, and outputs to the socket. +// Called by dedicated target goroutine and will block until success or shutdown. 
+func (tcp *Tcp) Write(p []byte, rec *logr.LogRec) (int, error) { + try := 1 + backoff := RetryBackoffMillis + for { + select { + case <-tcp.shutdown: + return 0, nil + default: + } + + reporter := rec.Logger().Logr().ReportError + + conn, err := tcp.getConn(reporter) + if err != nil { + reporter(fmt.Errorf("log target %s connection error: %w", tcp.String(), err)) + backoff = tcp.sleep(backoff) + continue + } + + err = conn.SetWriteDeadline(time.Now().Add(time.Second * WriteTimeoutSecs)) + if err != nil { + reporter(fmt.Errorf("log target %s set write deadline error: %w", tcp.String(), err)) + } + + count, err := conn.Write(p) + if err == nil { + return count, nil + } + + reporter(fmt.Errorf("log target %s write error: %w", tcp.String(), err)) + + _ = tcp.close() + + backoff = tcp.sleep(backoff) + try++ + } +} + +// monitor continuously tries to read from the connection to detect socket close. +// This is needed because TCP target uses a write only socket and Linux systems +// take a long time to detect a loss of connectivity on a socket when only writing; +// the writes simply fail without an error returned. +func monitor(conn net.Conn, done <-chan struct{}) { + buf := make([]byte, 1) + for { + select { + case <-done: + return + case <-time.After(1 * time.Second): + } + + err := conn.SetReadDeadline(time.Now().Add(time.Second * 30)) + if err != nil { + continue + } + + _, err = conn.Read(buf) + + if errt, ok := err.(net.Error); ok && errt.Timeout() { + // read timeout is expected, keep looping. + continue + } + + // Any other error closes the connection, forcing a reconnect. + conn.Close() + return + } +} + +// String returns a string representation of this target. 
+func (tcp *Tcp) String() string { + return fmt.Sprintf("TcpTarget[%s:%d]", tcp.options.IP, tcp.options.Port) +} + +func (tcp *Tcp) sleep(backoff int64) int64 { + select { + case <-tcp.shutdown: + case <-time.After(time.Millisecond * time.Duration(backoff)): + } + + nextBackoff := backoff + (backoff >> 1) + if nextBackoff > MaxRetryBackoffMillis { + nextBackoff = MaxRetryBackoffMillis + } + return nextBackoff +} diff --git a/vendor/github.com/mattermost/logr/v2/targets/test-tls-client-cert.pem b/vendor/github.com/mattermost/logr/v2/targets/test-tls-client-cert.pem new file mode 100644 index 00000000..6ce8d042 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/test-tls-client-cert.pem @@ -0,0 +1,43 @@ +-----BEGIN CERTIFICATE----- +MIIDjzCCAnegAwIBAgIRAPYfRSwdzKopBKxYxKqslJUwDQYJKoZIhvcNAQELBQAw +JzElMCMGA1UEAwwcTWF0dGVybW9zdCwgSW5jLiBJbnRlcm5hbCBDQTAeFw0xOTAz +MjIwMDE0MTVaFw0yMjAzMDYwMDE0MTVaMDsxOTA3BgNVBAMTME1hdHRlcm1vc3Qs +IEluYy4gSW50ZXJuYWwgSW50ZXJtZWRpYXRlIEF1dGhvcml0eTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMjliRdmvnNL4u/Jr/M2dPwQmTJXEBY/Vq9Q +vAU52X3tRMCPxcaFz+x6ftuvdO2NdohXGAmtx9QU5LZcvFeTDpoVEBo9A+4jtLvD +DZYaTNLpJmoSoJHaDbdWX+OAOqyDiWS741LuiMKWHhew9QOisat2ZINPxjmAd9wE +xthTMgzsv7MUqnMer8U5OGQ0Qy7wAmNRc+2K3qPwkxe2RUvcte50DUFNgxEginsh +vrkOXR383vUCZfu72qu8oggjiQpyTllu5je2Ap6JLjYLkEMiMqrYADuWor/ZHwa6 +WrFqVETxWfAV5u9Eh0wZM/KKYwRQuw9y+Nans77FmUl1tVWWNN8CAwEAAaOBoTCB +njAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBQY4Uqswyr2hO/HetZt2RDxJdTIPjBi +BgNVHSMEWzBZgBRFZXVg2Z5tNIsWeWjBLEy2yzKbMKErpCkwJzElMCMGA1UEAwwc +TWF0dGVybW9zdCwgSW5jLiBJbnRlcm5hbCBDQYIUEifGUOM+bIFZo1tkjZB5YGBr +0xEwCwYDVR0PBAQDAgEGMA0GCSqGSIb3DQEBCwUAA4IBAQAEdexL30Q0zBHmPAH8 +LhdK7dbzW1CmILbxRZlKAwRN+hKRXiMW3MHIkhNuoV9Aev602Q+ja4lWsRi/ktOL +ni1FWx5gSScgdG8JGj47dOmoT3vXKX7+umiv4rQLPDl9/DKMuv204OYJq6VT+uNU +6C6kL157jGJEO76H4fMZ8oYsD7Sq0zjiNKtuCYii0ngH3j3gB1jACLqRgveU7MdT +pqOV2KfY31+h8VBtkUvljNztQ9xNY8Fjmt0SMf7E3FaUcaar3ZCr70G5aU3dKbe7 
+47vGOBa5tCqw4YK0jgDKid3IJQul9a3J1mSsH8Wy3to9cAV4KGZBQLnzCX15a/+v +3yVh +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIDfjCCAmagAwIBAgIUEifGUOM+bIFZo1tkjZB5YGBr0xEwDQYJKoZIhvcNAQEL +BQAwJzElMCMGA1UEAwwcTWF0dGVybW9zdCwgSW5jLiBJbnRlcm5hbCBDQTAeFw0x +OTAzMjEyMTI4NDNaFw0yOTAzMTgyMTI4NDNaMCcxJTAjBgNVBAMMHE1hdHRlcm1v +c3QsIEluYy4gSW50ZXJuYWwgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQDH0Xq5rMBGpKOVWTpb5MnaJIWFP/vOtvEk+7hVrfOfe1/5x0Kk3UgAHj85 +otaEZD1Lhn/JLkEqCiE/UXMJFwJDlNcO4CkdKBSpYX4bKAqy5q/X3QwioMSNpJG1 ++YYrNGBH0sgKcKjyCaLhmqYLD0xZDVOmWIYBU9jUPyXw5U0tnsVrTqGMxVkm1xCY +krCWN1ZoUrLvL0MCZc5qpxoPTopr9UO9cqSBSuy6BVWVuEWBZhpqHt+ul8VxhzzY +q1k4l7r2qw+/wm1iJBedTeBVeWNag8JaVfLgu+/W7oJVlPO32Po7pnvHp8iJ3b4K +zXyVHaTX4S6Em+6LV8855TYrShzlAgMBAAGjgaEwgZ4wHQYDVR0OBBYEFEVldWDZ +nm00ixZ5aMEsTLbLMpswMGIGA1UdIwRbMFmAFEVldWDZnm00ixZ5aMEsTLbLMpsw +oSukKTAnMSUwIwYDVQQDDBxNYXR0ZXJtb3N0LCBJbmMuIEludGVybmFsIENBghQS +J8ZQ4z5sgVmjW2SNkHlgYGvTETAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIBBjAN +BgkqhkiG9w0BAQsFAAOCAQEAPiCWFmopyAkY2T3Zyo4yaRPhX1+VOTMKJtY6EUhq +/GHz6kzEyvCUBf0N892cibGxekrEoItY9NqO6RQRfowg+Gn5kc13z4NyL2W8/eoT +Xy0ZvfaQbU++fQ6pVtWtMblDMU9xiYd7/MDvJpO328l1Vhcdp8kEi+lCvpy0sCRc +PxzPhbgCMAbZEGx+4TMQd4SZKzlRxW/2fflpReh6v1Dv0VDUSYQWwsUnaLpdKHfh +a5k0vuySYcszE4YKlY0zakeFlJfp7fBp1xTwcdW8aTfw15EicPMwTc6xxA4JJUJx +cddu817n1nayK5u6r9Qh1oIVkr0nC9YELMMy4dpPgJ88SA== +-----END CERTIFICATE----- diff --git a/vendor/github.com/mattermost/logr/v2/targets/testing.go b/vendor/github.com/mattermost/logr/v2/targets/testing.go new file mode 100644 index 00000000..ea3df70c --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/testing.go @@ -0,0 +1,72 @@ +package targets + +import ( + "strings" + "sync" + "testing" + + "github.com/mattermost/logr/v2" + "github.com/mattermost/logr/v2/formatters" +) + +// Testing is a simple log target that writes to a (*testing.T) log. 
+type Testing struct { + mux sync.Mutex + t *testing.T +} + +func NewTestingTarget(t *testing.T) *Testing { + return &Testing{ + t: t, + } +} + +// Init is called once to initialize the target. +func (tt *Testing) Init() error { + return nil +} + +// Write outputs bytes to this file target. +func (tt *Testing) Write(p []byte, rec *logr.LogRec) (int, error) { + tt.mux.Lock() + defer tt.mux.Unlock() + + if tt.t != nil { + s := strings.TrimSpace(string(p)) + tt.t.Log(s) + } + return len(p), nil +} + +// Shutdown is called once to free/close any resources. +// Target queue is already drained when this is called. +func (tt *Testing) Shutdown() error { + tt.mux.Lock() + defer tt.mux.Unlock() + + tt.t = nil + return nil +} + +// CreateTestLogger creates a logger for unit tests. Log records are output to `(*testing.T).Log`. +// A new logger is returned along with a method to shutdown the new logger. +func CreateTestLogger(t *testing.T, levels ...logr.Level) (logger logr.Logger, shutdown func() error) { + lgr, _ := logr.New() + filter := logr.NewCustomFilter(levels...) 
+ formatter := &formatters.Plain{EnableCaller: true} + target := NewTestingTarget(t) + + if err := lgr.AddTarget(target, "test", filter, formatter, 1000); err != nil { + t.Fail() + } + shutdown = func() error { + err := lgr.Shutdown() + if err != nil { + target.mux.Lock() + target.t.Error("error shutting down test logger", err) + target.mux.Unlock() + } + return err + } + return lgr.NewLogger(), shutdown +} diff --git a/vendor/github.com/mattermost/logr/v2/targets/utils.go b/vendor/github.com/mattermost/logr/v2/targets/utils.go new file mode 100644 index 00000000..58c046c3 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/utils.go @@ -0,0 +1,50 @@ +package targets + +import ( + "crypto/x509" + "encoding/base64" + "errors" + "os" +) + +const ( + DefaultCertKey = "LOGR_DEFAULT_CERT" +) + +// GetCertPoolOrNil returns a x509.CertPool containing the cert(s) from `cert`, +// or from the certs specified by the env var `LOGR_DEFAULT_CERT`, either of which +// can be a path to a .pem or .crt file, or a base64 encoded cert. +// +// If a cert is specified by either `cert` or `LOGR_DEFAULT_CERT`, but the cert +// is invalid then an error is returned. +// +// If no certs are specified by either `cert` or `LOGR_DEFAULT_CERT`, then +// nil is returned. +func GetCertPoolOrNil(cert string) (*x509.CertPool, error) { + if cert == "" { + cert = getDefaultCert() + if cert == "" { + return nil, nil // no cert provided, not an error but no pool returned + } + } + + // first treat as a file and try to read. 
+ serverCert, err := os.ReadFile(cert) + if err != nil { + // maybe it's a base64 encoded cert + serverCert, err = base64.StdEncoding.DecodeString(cert) + if err != nil { + return nil, errors.New("cert cannot be read") + } + } + + pool := x509.NewCertPool() + if ok := pool.AppendCertsFromPEM(serverCert); ok { + return pool, nil + } + return nil, errors.New("cannot parse cert") +} + +func getDefaultCert() string { + return os.Getenv(DefaultCertKey) +} diff --git a/vendor/github.com/mattermost/logr/v2/targets/writer.go b/vendor/github.com/mattermost/logr/v2/targets/writer.go new file mode 100644 index 00000000..d9f64d76 --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/targets/writer.go @@ -0,0 +1,38 @@ +package targets + +import ( + "io" + "io/ioutil" + + "github.com/mattermost/logr/v2" +) + +// Writer outputs log records to any `io.Writer`. +type Writer struct { + out io.Writer +} + +// NewWriterTarget creates a target capable of outputting log records to an io.Writer. +func NewWriterTarget(out io.Writer) *Writer { + if out == nil { + out = ioutil.Discard + } + w := &Writer{out: out} + return w +} + +// Init is called once to initialize the target. +func (w *Writer) Init() error { + return nil +} + +// Write outputs bytes to this file target. +func (w *Writer) Write(p []byte, rec *logr.LogRec) (int, error) { + return w.out.Write(p) +} + +// Shutdown is called once to free/close any resources. +// Target queue is already drained when this is called. +func (w *Writer) Shutdown() error { + return nil +} diff --git a/vendor/github.com/mattermost/logr/v2/timeout.go b/vendor/github.com/mattermost/logr/v2/timeout.go new file mode 100644 index 00000000..37737bcf --- /dev/null +++ b/vendor/github.com/mattermost/logr/v2/timeout.go @@ -0,0 +1,34 @@ +package logr + +import "github.com/wiggin77/merror" + +// timeoutError is returned from functions that can timeout. +type timeoutError struct { + text string +} + +// newTimeoutError returns a TimeoutError. 
+func newTimeoutError(text string) timeoutError { + return timeoutError{text: text} +} + +// IsTimeoutError returns true if err is a TimeoutError. +func IsTimeoutError(err error) bool { + if _, ok := err.(timeoutError); ok { + return true + } + // if a multi-error, return true if any of the errors + // are TimeoutError + if merr, ok := err.(*merror.MError); ok { + for _, e := range merr.Errors() { + if IsTimeoutError(e) { + return true + } + } + } + return false +} + +func (err timeoutError) Error() string { + return err.text +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/LICENSE.txt b/vendor/github.com/mattermost/mattermost-plugin-ai/LICENSE.txt new file mode 100644 index 00000000..66e750e1 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/LICENSE.txt @@ -0,0 +1,190 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +Copyright 2024 Mattermost + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/NOTICE.txt b/vendor/github.com/mattermost/mattermost-plugin-ai/NOTICE.txt new file mode 100644 index 00000000..75997c6f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/NOTICE.txt @@ -0,0 +1,4120 @@ +Mattermost Agents Plugin + +©2023-present Mattermost, Inc. All Rights Reserved. See LICENSE.txt for license information. + +NOTICES: +-------- + +This document includes a list of open source components used in the plugin, including those that have been modified. + +-------- + +## @floating-ui/react-dom-interactions + +This product contains '@floating-ui/react-dom-interactions' by atomiks. + +Floating UI Interactions for React DOM + +* HOMEPAGE: + * https://floating-ui.com/docs/react-dom-interactions + +* LICENSE: MIT + +MIT License + +Copyright (c) 2021 Floating UI contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## @mattermost/client + +This product contains '@mattermost/client'. 
+ +JavaScript/TypeScript client for Mattermost + +* HOMEPAGE: + * https://github.com/mattermost/mattermost/tree/master/webapp/platform/client#readme + +* LICENSE: MIT + +Mattermost Licensing + +SOFTWARE LICENSING + +You are licensed to use compiled versions of the Mattermost platform produced by Mattermost, Inc. under an MIT LICENSE + +- See MIT-COMPILED-LICENSE.md included in compiled versions for details + +You may be licensed to use source code to create compiled versions not produced by Mattermost, Inc. in one of two ways: + +1. Under the Free Software Foundation’s GNU AGPL v3.0, subject to the exceptions outlined in this policy; or +2. Under a commercial license available from Mattermost, Inc. by contacting commercial@mattermost.com + +You are licensed to use the source code in Admin Tools and Configuration Files (server/templates/, server/i18n/, +server/public/, webapp/ and all subdirectories thereof) under the Apache License v2.0. + +We promise that we will not enforce the copyleft provisions in AGPL v3.0 against you if your application (a) does not +link to the Mattermost Platform directly, but exclusively uses the Mattermost Admin Tools and Configuration Files, and +(b) you have not modified, added to or adapted the source code of Mattermost in a way that results in the creation of +a “modified version” or “work based on” Mattermost as these terms are defined in the AGPL v3.0 license. + +MATTERMOST TRADEMARK GUIDELINES + +Your use of the mark Mattermost is subject to Mattermost, Inc's prior written approval and our organization’s Trademark +Standards of Use at https://mattermost.com/trademark-standards-of-use/. 
For trademark approval or any questions +you have about using these trademarks, please email trademark@mattermost.com + +------------------------------------------------------------------------------------------------------------------------------ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +------------------------------------------------------------------------------ + +The software is released under the terms of the GNU Affero General Public +License, version 3. + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. 
This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. 
However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. 
+ + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. 
+ + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
+
+
+---
+
+## @mattermost/compass-icons
+
+This product contains '@mattermost/compass-icons' by Mattermost.
+
+* LICENSE: MIT
+
+
+
+---
+
+## @mattermost/eslint-plugin
+
+This product contains '@mattermost/eslint-plugin' by Mattermost, Inc..
+
+ESLint configuration and custom rules used by Mattermost
+
+* HOMEPAGE:
+  * https://github.com/mattermost/mattermost/tree/master/webapp/platform/eslint-plugin#readme
+
+* LICENSE: Apache 2.0
+
+Mattermost Licensing
+
+SOFTWARE LICENSING
+
+You are licensed to use compiled versions of the Mattermost platform produced by Mattermost, Inc. under an MIT LICENSE
+
+- See MIT-COMPILED-LICENSE.md included in compiled versions for details
+
+You may be licensed to use source code to create compiled versions not produced by Mattermost, Inc. in one of two ways:
+
+1. Under the Free Software Foundation’s GNU AGPL v3.0, subject to the exceptions outlined in this policy; or
+2. Under a commercial license available from Mattermost, Inc. by contacting commercial@mattermost.com
+
+You are licensed to use the source code in Admin Tools and Configuration Files (server/templates/, server/i18n/,
+server/public/, webapp/ and all subdirectories thereof) under the Apache License v2.0.
+ +We promise that we will not enforce the copyleft provisions in AGPL v3.0 against you if your application (a) does not +link to the Mattermost Platform directly, but exclusively uses the Mattermost Admin Tools and Configuration Files, and +(b) you have not modified, added to or adapted the source code of Mattermost in a way that results in the creation of +a “modified version” or “work based on” Mattermost as these terms are defined in the AGPL v3.0 license. + +MATTERMOST TRADEMARK GUIDELINES + +Your use of the mark Mattermost is subject to Mattermost, Inc's prior written approval and our organization’s Trademark +Standards of Use at https://mattermost.com/trademark-standards-of-use/. For trademark approval or any questions +you have about using these trademarks, please email trademark@mattermost.com + +------------------------------------------------------------------------------------------------------------------------------ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+------------------------------------------------------------------------------
+
+The software is released under the terms of the GNU Affero General Public
+License, version 3.
+
+                    GNU AFFERO GENERAL PUBLIC LICENSE
+                       Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+ + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". 
"Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. 
Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+ + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. 
There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. + + +--- + +## Go + +This product contains 'Go' by Go. + +The Go programming language + +* HOMEPAGE: + * https://go.dev + +* LICENSE: BSD 3-Clause "New" or "Revised" License + +Copyright 2009 The Go Authors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google LLC nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +--- + +## Masterminds/squirrel + +This product contains 'Masterminds/squirrel' by Masterminds. + +Fluent SQL generation for golang + +* HOMEPAGE: + * https://github.com/Masterminds/squirrel + +* LICENSE: Other + +MIT License + +Squirrel: The Masterminds +Copyright (c) 2014-2015, Lann Martin. Copyright (C) 2015-2016, Google. Copyright (C) 2015, Matt Farina and Matt Butcher. + + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ + +--- + +## andygrunwald/go-jira + +This product contains 'andygrunwald/go-jira' by Andy Grunwald. + +Go client library for Atlassian Jira + +* HOMEPAGE: + * https://pkg.go.dev/github.com/andygrunwald/go-jira?tab=doc + +* LICENSE: MIT License + + + +--- + +## anthropics/anthropic-sdk-go + +This product contains 'anthropics/anthropic-sdk-go' by Anthropic. + +Access to Anthropic's safety-first language model APIs via Go + +* HOMEPAGE: + * https://github.com/anthropics/anthropic-sdk-go + +* LICENSE: MIT License + +Copyright 2023 Anthropic, PBC. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +--- + +## asticode/go-astisub + +This product contains 'asticode/go-astisub' by Quentin Renard. + +Manipulate subtitles in GO (.srt, .ssa/.ass, .stl, .ttml, .vtt (webvtt), teletext, etc.) 
+ +* HOMEPAGE: + * https://github.com/asticode/go-astisub + +* LICENSE: MIT License + +MIT License + +Copyright (c) 2016 Quentin Renard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## core-js + +This product contains 'core-js' by Denis Pushkarev. + +Standard library + +* HOMEPAGE: + * https://github.com/zloirock/core-js#readme + +* LICENSE: MIT + +Copyright (c) 2014-2025 Denis Pushkarev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +--- + +## debounce + +This product contains 'debounce'. + +Delay function calls until a set time elapses after the last invocation + +* HOMEPAGE: + * https://github.com/sindresorhus/debounce#readme + +* LICENSE: MIT + + + +--- + +## gin-gonic/gin + +This product contains 'gin-gonic/gin' by Gin-Gonic. + +Gin is a HTTP web framework written in Go (Golang). It features a Martini-like API with much better performance -- up to 40 times faster. If you need smashing performance, get yourself some Gin. + +* HOMEPAGE: + * https://gin-gonic.com/ + +* LICENSE: MIT License + +The MIT License (MIT) + +Copyright (c) 2014 Manuel Martínez-Almeida + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +--- + +## google/go-github + +This product contains 'google/go-github' by Google. + +Go library for accessing the GitHub v3 API + +* HOMEPAGE: + * https://pkg.go.dev/github.com/google/go-github/v68/github + +* LICENSE: BSD 3-Clause "New" or "Revised" License + +Copyright (c) 2013 The go-github AUTHORS. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +--- + +## invopop/jsonschema + +This product contains 'invopop/jsonschema' by Invopop. + +Generate JSON Schemas from Go types + +* HOMEPAGE: + * https://github.com/invopop/jsonschema + +* LICENSE: MIT License + + + +--- + +## jmoiron/sqlx + +This product contains 'jmoiron/sqlx' by Jason Moiron. + +general purpose extensions to golang's database/sql + +* HOMEPAGE: + * http://jmoiron.github.io/sqlx/ + +* LICENSE: MIT License + + Copyright (c) 2013, Jason Moiron + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + + +--- + +## luxon + +This product contains 'luxon' by Isaac Cambron. + +Immutable date wrapper + +* HOMEPAGE: + * https://github.com/moment/luxon#readme + +* LICENSE: MIT + +Copyright 2019 JS Foundation and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +--- + +## mattermost/mattermost + +This product contains 'mattermost/mattermost' by Mattermost. + +Mattermost is an open source platform for secure collaboration across the entire software development lifecycle.. + +* HOMEPAGE: + * https://mattermost.com + +* LICENSE: Other + +Mattermost Licensing + +SOFTWARE LICENSING + +You are licensed to use compiled versions of the Mattermost platform produced by Mattermost, Inc. 
under an MIT LICENSE + +- See MIT-COMPILED-LICENSE.md included in compiled versions for details + +You may be licensed to use source code to create compiled versions not produced by Mattermost, Inc. in one of two ways: + +1. Under the Free Software Foundation’s GNU AGPL v3.0, subject to the exceptions outlined in this policy; or +2. Under a commercial license available from Mattermost, Inc. by contacting commercial@mattermost.com + +You are licensed to use the source code in Admin Tools and Configuration Files (server/templates/, server/i18n/, +server/public/, webapp/ and all subdirectories thereof) under the Apache License v2.0. + +We promise that we will not enforce the copyleft provisions in AGPL v3.0 against you if your application (a) does not +link to the Mattermost Platform directly, but exclusively uses the Mattermost Admin Tools and Configuration Files, and +(b) you have not modified, added to or adapted the source code of Mattermost in a way that results in the creation of +a “modified version” or “work based on” Mattermost as these terms are defined in the AGPL v3.0 license. + +MATTERMOST TRADEMARK GUIDELINES + +Your use of the mark Mattermost is subject to Mattermost, Inc's prior written approval and our organization’s Trademark +Standards of Use at https://mattermost.com/trademark-standards-of-use/. For trademark approval or any questions +you have about using these trademarks, please email trademark@mattermost.com + +------------------------------------------------------------------------------------------------------------------------------ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +------------------------------------------------------------------------------ + +The software is released under the terms of the GNU Affero General Public +License, version 3. + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. 
This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. 
However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. 
+ + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. 
+ + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
+
+
+---
+
+## nicksnyder/go-i18n
+
+This product contains 'nicksnyder/go-i18n' by Nick Snyder.
+
+Translate your Go program into multiple languages.
+
+* HOMEPAGE:
+  * https://github.com/nicksnyder/go-i18n
+
+* LICENSE: MIT License
+
+Copyright (c) 2014 Nick Snyder https://github.com/nicksnyder
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+---
+
+## pkg/errors
+
+This product contains 'pkg/errors' by pkg.
+
+Simple error handling primitives
+
+* HOMEPAGE:
+  * https://godoc.org/github.com/pkg/errors
+
+* LICENSE: BSD 2-Clause "Simplified" License
+
+Copyright (c) 2015, Dave Cheney <dave@cheney.net>
+All rights reserved.
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +--- + +## process + +This product contains 'process' by Roman Shtylman. + +process information for node.js and browsers + +* HOMEPAGE: + * https://github.com/shtylman/node-process#readme + +* LICENSE: MIT + + + +--- + +## prometheus/client_golang + +This product contains 'prometheus/client_golang' by Prometheus. + +Prometheus instrumentation library for Go applications + +* HOMEPAGE: + * https://pkg.go.dev/github.com/prometheus/client_golang + +* LICENSE: Apache License 2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +--- + +## react + +This product contains 'react'. + +React is a JavaScript library for building user interfaces. + +* HOMEPAGE: + * https://react.dev/ + +* LICENSE: MIT + +MIT License + +Copyright (c) Meta Platforms, Inc. and affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## react-dom + +This product contains 'react-dom'. + +React package for working with the DOM. + +* HOMEPAGE: + * https://react.dev/ + +* LICENSE: MIT + +MIT License + +Copyright (c) Meta Platforms, Inc. and affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## react-intl + +This product contains 'react-intl' by Eric Ferraiuolo. + +Internationalize React apps. This library provides React components and an API to format dates, numbers, and strings, including pluralization and handling translations. 
+ +* HOMEPAGE: + * https://formatjs.github.io/docs/react-intl + +* LICENSE: BSD-3-Clause + + + +--- + +## react-redux + +This product contains 'react-redux' by Dan Abramov. + +Official React bindings for Redux + +* HOMEPAGE: + * https://github.com/reduxjs/react-redux + +* LICENSE: MIT + +The MIT License (MIT) + +Copyright (c) 2015-present Dan Abramov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## react-select + +This product contains 'react-select' by Jed Watson. 
+ +A Select control built with and for ReactJS + +* HOMEPAGE: + * https://github.com/JedWatson/react-select/tree/master#readme + +* LICENSE: MIT + +The MIT License (MIT) + +Copyright (c) 2022 Jed Watson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## react-use + +This product contains 'react-use' by @streamich. + +Collection of React Hooks + +* HOMEPAGE: + * https://github.com/streamich/react-use#readme + +* LICENSE: Unlicense + +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. 
We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <https://unlicense.org>
+
+
+---
+
+## redux
+
+This product contains 'redux'.
+
+Predictable state container for JavaScript apps
+
+* HOMEPAGE:
+  * http://redux.js.org
+
+* LICENSE: MIT
+
+The MIT License (MIT)
+
+Copyright (c) 2015-present Dan Abramov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## sashabaranov/go-openai + +This product contains 'sashabaranov/go-openai' by Alex Baranov. + +OpenAI ChatGPT, GPT-3, GPT-4, DALL·E, Whisper API wrapper for Go + +* HOMEPAGE: + * https://github.com/sashabaranov/go-openai + +* LICENSE: Apache License 2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +--- + +## sirupsen/logrus + +This product contains 'sirupsen/logrus' by Simon Eskildsen. + +Structured, pluggable logging for Go. + +* HOMEPAGE: + * https://github.com/sirupsen/logrus + +* LICENSE: MIT License + +The MIT License (MIT) + +Copyright (c) 2014 Simon Eskildsen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +--- + +## stretchr/testify + +This product contains 'stretchr/testify' by Stretchr, Inc.. + +A toolkit with common assertions and mocks that plays nicely with the standard library + +* HOMEPAGE: + * https://github.com/stretchr/testify + +* LICENSE: MIT License + +MIT License + +Copyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## styled-components + +This product contains 'styled-components' by Glen Maddern. + +CSS for the Age. Style components your way with speed, strong typing, and flexibility. 
+ +* HOMEPAGE: + * https://styled-components.com + +* LICENSE: MIT + +MIT License + +Copyright (c) 2016-present Glen Maddern and Maximilian Stoiber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +--- + +## typescript + +This product contains 'typescript' by Microsoft Corp.. + +TypeScript is a language for application scale JavaScript development + +* HOMEPAGE: + * https://www.typescriptlang.org/ + +* LICENSE: Apache-2.0 + +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
+ +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + + +--- + +## x/text + +This product contains 'x/text' by Go. + +[mirror] Go text processing support + +* HOMEPAGE: + * https://golang.org/x/text + +* LICENSE: BSD 3-Clause "New" or "Revised" License + +Copyright 2009 The Go Authors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google LLC nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/annotations.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/annotations.go new file mode 100644 index 00000000..699f4ca8 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/annotations.go @@ -0,0 +1,23 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package llm + +// AnnotationType represents different types of annotations +type AnnotationType string + +const ( + // AnnotationTypeURLCitation represents a web search citation + AnnotationTypeURLCitation AnnotationType = "url_citation" +) + +// Annotation represents an inline annotation/citation in the response text +type Annotation struct { + Type AnnotationType `json:"type"` // Type of annotation + StartIndex int `json:"start_index"` // Start position in message text (0-based) + EndIndex int `json:"end_index"` // End position in message text (0-based) + URL string `json:"url"` // Source URL + Title string `json:"title"` // Source title + CitedText string `json:"cited_text,omitempty"` // Optional: text being cited (for context) + Index int `json:"index"` // Display index (1-based for UI) +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/completion_request.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/completion_request.go new file mode 100644 index 00000000..6d1a4775 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/completion_request.go @@ -0,0 +1,97 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package llm + +import ( + "io" + "slices" + "strings" +) + +type File struct { + MimeType string + Size int64 + Reader io.Reader +} + +type PostRole int + +const ( + PostRoleUser PostRole = iota + PostRoleBot + PostRoleSystem +) + +type Post struct { + Role PostRole + Message string + Files []File + ToolUse []ToolCall + Reasoning string // Extended thinking/reasoning content from models that support it + ReasoningSignature string // Signature for thinking blocks (opaque verification field) +} + +type CompletionRequest struct { + Posts []Post + Context *Context +} + +func (b *CompletionRequest) Truncate(maxTokens int, countTokens func(string) int) bool { + oldPosts := b.Posts + b.Posts = make([]Post, 0, len(oldPosts)) + var totalTokens int + for i := len(oldPosts) - 1; i >= 0; i-- { + post := oldPosts[i] + if totalTokens >= maxTokens { + slices.Reverse(b.Posts) + return true + } + postTokens := countTokens(post.Message) + if (totalTokens + postTokens) > maxTokens { + charactersToCut := (postTokens - (maxTokens - totalTokens)) * 4 + post.Message = strings.TrimSpace(post.Message[charactersToCut:]) + b.Posts = append(b.Posts, post) + slices.Reverse(b.Posts) + return true + } + totalTokens += postTokens + b.Posts = append(b.Posts, post) + } + + slices.Reverse(b.Posts) + return false +} + +// ExtractSystemMessage extracts the system message from the conversation. 
+func (b CompletionRequest) ExtractSystemMessage() string { + for _, post := range b.Posts { + if post.Role == PostRoleSystem { + return post.Message + } + } + return "" +} + +func (b CompletionRequest) String() string { + // Create a string of all the posts with their role and message + var result strings.Builder + result.WriteString("--- Conversation ---") + for _, post := range b.Posts { + switch post.Role { + case PostRoleUser: + result.WriteString("\n--- User ---\n") + case PostRoleBot: + result.WriteString("\n--- Bot ---\n") + case PostRoleSystem: + result.WriteString("\n--- System ---\n") + default: + result.WriteString("\n--- ---\n") + } + result.WriteString(post.Message) + } + result.WriteString("\n--- Context ---\n") + result.WriteString(b.Context.String()) + + return result.String() +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/configuration.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/configuration.go new file mode 100644 index 00000000..d3b1638f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/configuration.go @@ -0,0 +1,112 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package llm + +type ServiceConfig struct { + ID string `json:"id"` + Name string `json:"name"` + Type string `json:"type"` + APIKey string `json:"apiKey"` + OrgID string `json:"orgId"` + DefaultModel string `json:"defaultModel"` + APIURL string `json:"apiURL"` + + // Renaming the JSON field to inputTokenLimit would require a migration, leaving as is for now. 
+ InputTokenLimit int `json:"tokenLimit"` + StreamingTimeoutSeconds int `json:"streamingTimeoutSeconds"` + SendUserID bool `json:"sendUserID"` + + // Otherwise known as maxTokens + OutputTokenLimit int `json:"outputTokenLimit"` + + // UseResponsesAPI determines whether to use the new OpenAI Responses API + // Only applicable to OpenAI and OpenAI-compatible services + UseResponsesAPI bool `json:"useResponsesAPI"` +} + +type ChannelAccessLevel int + +const ( + ChannelAccessLevelAll ChannelAccessLevel = iota + ChannelAccessLevelAllow + ChannelAccessLevelBlock + ChannelAccessLevelNone +) + +type UserAccessLevel int + +const ( + UserAccessLevelAll UserAccessLevel = iota + UserAccessLevelAllow + UserAccessLevelBlock + UserAccessLevelNone +) + +type BotConfig struct { + ID string `json:"id"` + Name string `json:"name"` + DisplayName string `json:"displayName"` + CustomInstructions string `json:"customInstructions"` + ServiceID string `json:"serviceID"` + + // Service is deprecated and kept only for backwards compatibility during migration. 
+ Service *ServiceConfig `json:"service,omitempty"` + + EnableVision bool `json:"enableVision"` + DisableTools bool `json:"disableTools"` + ChannelAccessLevel ChannelAccessLevel `json:"channelAccessLevel"` + ChannelIDs []string `json:"channelIDs"` + UserAccessLevel UserAccessLevel `json:"userAccessLevel"` + UserIDs []string `json:"userIDs"` + TeamIDs []string `json:"teamIDs"` + MaxFileSize int64 `json:"maxFileSize"` + + // EnabledNativeTools contains the list of enabled native tools for this bot + // For OpenAI: ["web_search", "file_search", "code_interpreter"] (only works when UseResponsesAPI is true) + // For Anthropic: ["web_search"] + EnabledNativeTools []string `json:"enabledNativeTools"` +} + +func (c *BotConfig) IsValid() bool { + // Basic validation - service validation happens separately + if c.Name == "" || c.DisplayName == "" || c.ServiceID == "" { + return false + } + + // Validate access levels are within bounds + if c.ChannelAccessLevel < ChannelAccessLevelAll || c.ChannelAccessLevel > ChannelAccessLevelNone { + return false + } + if c.UserAccessLevel < UserAccessLevelAll || c.UserAccessLevel > UserAccessLevelNone { + return false + } + + return true +} + +// IsValidService validates a service configuration +func IsValidService(service ServiceConfig) bool { + // Basic validation + if service.ID == "" || service.Type == "" { + return false + } + + // Service-specific validation + switch service.Type { + case ServiceTypeOpenAI: + return service.APIKey != "" + case ServiceTypeOpenAICompatible: + return service.APIURL != "" + case ServiceTypeAzure: + return service.APIKey != "" && service.APIURL != "" + case ServiceTypeAnthropic: + return service.APIKey != "" + case ServiceTypeASage: + return service.APIKey != "" + case ServiceTypeCohere: + return service.APIKey != "" + default: + return false + } +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/context.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/context.go new file 
mode 100644 index 00000000..75c00ad8 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/context.go @@ -0,0 +1,91 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package llm + +import ( + "fmt" + "strings" + "time" + + "github.com/mattermost/mattermost/server/public/model" +) + +// Context represents the data necessary to build the context of the LLM. +// For consumers none of the fields can be assumed to be present. +type Context struct { + // Server + Time string + ServerName string + CompanyName string + + // Location + Team *model.Team + Channel *model.Channel + Thread []Post // Normalized posts that already have been formatted. nil if not in a thread or a root post + + // User that is making the request + RequestingUser *model.User + + // Session information for authentication + SessionID string + // SessionResolver provides secure access to session token when needed + SessionResolver func() (string, error) + + // Bot Specific + BotName string + BotUsername string + BotModel string + CustomInstructions string + + Tools *ToolStore + Parameters map[string]interface{} +} + +// ContextOption defines a function that configures a Context +type ContextOption func(*Context) + +// NewContext creates a new Context with the given options +func NewContext(opts ...ContextOption) *Context { + c := &Context{ + Time: time.Now().UTC().Format(time.RFC1123), + } + + for _, opt := range opts { + opt(c) + } + + return c +} + +func (c Context) String() string { + var result strings.Builder + result.WriteString(fmt.Sprintf("Time: %v\nServerName: %v\nCompanyName: %v", c.Time, c.ServerName, c.CompanyName)) + if c.RequestingUser != nil { + result.WriteString(fmt.Sprintf("\nRequestingUser: %v", c.RequestingUser.Username)) + } + if c.SessionID != "" { + result.WriteString(fmt.Sprintf("\nSessionID: %v", c.SessionID)) + } + if c.Channel != nil { + result.WriteString(fmt.Sprintf("\nChannel: %v", 
c.Channel.Name)) + } + if c.Team != nil { + result.WriteString(fmt.Sprintf("\nTeam: %v", c.Team.Name)) + } + + result.WriteString("\n--- Parameters ---\n") + for key := range c.Parameters { + result.WriteString(fmt.Sprintf(" %v", key)) + } + + if c.Tools != nil { + result.WriteString("\n--- Tools ---\n") + for _, tool := range c.Tools.GetTools() { + result.WriteString(tool.Name) + result.WriteString(" ") + } + } + + return result.String() +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/language_model.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/language_model.go new file mode 100644 index 00000000..5fa947f9 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/language_model.go @@ -0,0 +1,60 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// Package llm provides a unified abstraction layer for Large Language Model interactions +// within the Mattermost AI plugin. +// +// This package defines the core interfaces and data structures for working with various +// LLM providers (OpenAI, Anthropic, etc.) in a consistent manner. It handles: +// +// - LanguageModel interface abstraction for different LLM providers +// - Conversation management with structured posts, roles, and context +// - Prompt template system with embedded templates and variable substitution +// - Streaming text responses for real-time chat interactions +// - Tool/function calling capabilities with JSON schema validation +// - Request/response structures with token counting and truncation +// - Context management including user info, channels, and bot configurations +// +// The package is designed to be provider-agnostic, allowing the plugin to work +// with multiple LLM services through a common interface while preserving +// provider-specific capabilities like vision, JSON output, and tool calling. 
+package llm + +import ( + "github.com/google/jsonschema-go/jsonschema" +) + +type LanguageModel interface { + ChatCompletion(conversation CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error) + ChatCompletionNoStream(conversation CompletionRequest, opts ...LanguageModelOption) (string, error) + + CountTokens(text string) int + InputTokenLimit() int +} + +type LanguageModelConfig struct { + Model string + MaxGeneratedTokens int + EnableVision bool + JSONOutputFormat *jsonschema.Schema +} + +type LanguageModelOption func(*LanguageModelConfig) + +func WithModel(model string) LanguageModelOption { + return func(cfg *LanguageModelConfig) { + cfg.Model = model + } +} +func WithMaxGeneratedTokens(maxGeneratedTokens int) LanguageModelOption { + return func(cfg *LanguageModelConfig) { + cfg.MaxGeneratedTokens = maxGeneratedTokens + } +} +func WithJSONOutput[T any]() LanguageModelOption { + return func(cfg *LanguageModelConfig) { + cfg.JSONOutputFormat = NewJSONSchemaFromStruct[T]() + } +} + +type LanguageModelWrapper func(LanguageModel) LanguageModel diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/logging.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/logging.go new file mode 100644 index 00000000..507951c3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/logging.go @@ -0,0 +1,81 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package llm + +import ( + "fmt" + "testing" + + "github.com/mattermost/mattermost/server/public/pluginapi" +) + +type LanguageModelLogWrapper struct { + log pluginapi.LogService + wrapped LanguageModel +} + +func NewLanguageModelLogWrapper(log pluginapi.LogService, wrapped LanguageModel) *LanguageModelLogWrapper { + return &LanguageModelLogWrapper{ + log: log, + wrapped: wrapped, + } +} + +func (w *LanguageModelLogWrapper) logInput(request CompletionRequest, opts ...LanguageModelOption) { + prompt := fmt.Sprintf("\n%v", request) + w.log.Info("LLM Call", "prompt", prompt) +} + +func (w *LanguageModelLogWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error) { + w.logInput(request, opts...) + return w.wrapped.ChatCompletion(request, opts...) +} + +func (w *LanguageModelLogWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error) { + w.logInput(request, opts...) + return w.wrapped.ChatCompletionNoStream(request, opts...) +} + +func (w *LanguageModelLogWrapper) CountTokens(text string) int { + return w.wrapped.CountTokens(text) +} + +func (w *LanguageModelLogWrapper) InputTokenLimit() int { + return w.wrapped.InputTokenLimit() +} + +type LanguageModelTestLogWrapper struct { + t *testing.T + wrapped LanguageModel +} + +func NewLanguageModelTestLogWrapper(t *testing.T, wrapped LanguageModel) *LanguageModelTestLogWrapper { + return &LanguageModelTestLogWrapper{ + t: t, + wrapped: wrapped, + } +} + +func (w *LanguageModelTestLogWrapper) logInput(request CompletionRequest, opts ...LanguageModelOption) { + prompt := fmt.Sprintf("\n%v", request) + w.t.Log(prompt) +} + +func (w *LanguageModelTestLogWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error) { + w.logInput(request, opts...) + return w.wrapped.ChatCompletion(request, opts...) 
+} + +func (w *LanguageModelTestLogWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error) { + w.logInput(request, opts...) + return w.wrapped.ChatCompletionNoStream(request, opts...) +} + +func (w *LanguageModelTestLogWrapper) CountTokens(text string) int { + return w.wrapped.CountTokens(text) +} + +func (w *LanguageModelTestLogWrapper) InputTokenLimit() int { + return w.wrapped.InputTokenLimit() +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/prompts.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/prompts.go new file mode 100644 index 00000000..503781f8 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/prompts.go @@ -0,0 +1,69 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package llm + +import ( + "fmt" + "io/fs" + "strings" + "text/template" + + "errors" +) + +type Prompts struct { + templates *template.Template +} + +const PromptExtension = "tmpl" + +func NewPrompts(input fs.FS) (*Prompts, error) { + templates, err := template.ParseFS(input, "*.tmpl") + if err != nil { + return nil, fmt.Errorf("unable to parse prompt templates: %w", err) + } + + return &Prompts{ + templates: templates, + }, nil +} + +func withPromptExtension(filename string) string { + return filename + "." 
+ PromptExtension +} + +func (p *Prompts) FormatString(templateCode string, context *Context) (string, error) { + template, err := p.templates.Clone() + if err != nil { + return "", err + } + + template, err = template.Parse(templateCode) + if err != nil { + return "", err + } + + out := &strings.Builder{} + if err := template.Execute(out, context); err != nil { + return "", fmt.Errorf("unable to execute template: %w", err) + } + return strings.TrimSpace(out.String()), nil +} + +func (p *Prompts) Format(templateName string, context *Context) (string, error) { + tmpl := p.templates.Lookup(withPromptExtension(templateName)) + if tmpl == nil { + return "", errors.New("template not found") + } + + return p.execute(tmpl, context) +} + +func (p *Prompts) execute(template *template.Template, data *Context) (string, error) { + out := &strings.Builder{} + if err := template.Execute(out, data); err != nil { + return "", fmt.Errorf("unable to execute template: %w", err) + } + return strings.TrimSpace(out.String()), nil +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/service_types.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/service_types.go new file mode 100644 index 00000000..4895b1e0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/service_types.go @@ -0,0 +1,13 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package llm + +const ( + ServiceTypeOpenAI = "openai" + ServiceTypeOpenAICompatible = "openaicompatible" + ServiceTypeAzure = "azure" + ServiceTypeASage = "asage" + ServiceTypeAnthropic = "anthropic" + ServiceTypeCohere = "cohere" +) diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/stream.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/stream.go new file mode 100644 index 00000000..dc0d074e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/stream.go @@ -0,0 +1,100 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package llm + +import "fmt" + +// EventType represents the type of event in the text stream +type EventType int + +const ( + // EventTypeText represents a text chunk event + EventTypeText EventType = iota + // EventTypeEnd represents the end of the stream + EventTypeEnd + // EventTypeError represents an error event + EventTypeError + // EventTypeToolCalls represents a tool call event + EventTypeToolCalls + // EventTypeReasoning represents a reasoning summary chunk event + EventTypeReasoning + // EventTypeReasoningEnd represents the end of reasoning summary + EventTypeReasoningEnd + // EventTypeAnnotations represents annotations/citations in the response + EventTypeAnnotations + // EventTypeUsage represents token usage data + EventTypeUsage +) + +// TokenUsage represents token usage statistics for an LLM request +type TokenUsage struct { + InputTokens int64 `json:"input_tokens"` + OutputTokens int64 `json:"output_tokens"` +} + +// ReasoningData represents the complete reasoning/thinking data including signature +type ReasoningData struct { + Text string // The reasoning/thinking text content + Signature string // Opaque verification signature from the model +} + +// TextStreamEvent represents an event in the text stream +type TextStreamEvent struct { + Type EventType + Value any +} + +// TextStreamResult represents a stream of text 
events +type TextStreamResult struct { + Stream <-chan TextStreamEvent +} + +func NewStreamFromString(text string) *TextStreamResult { + stream := make(chan TextStreamEvent) + + go func() { + // Send the text as a text event + stream <- TextStreamEvent{ + Type: EventTypeText, + Value: text, + } + + // Send end event + stream <- TextStreamEvent{ + Type: EventTypeEnd, + Value: nil, + } + + close(stream) + }() + + return &TextStreamResult{ + Stream: stream, + } +} + +func (t *TextStreamResult) ReadAll() (string, error) { + result := "" + for event := range t.Stream { + switch event.Type { + case EventTypeText: + if textChunk, ok := event.Value.(string); ok { + result += textChunk + } + case EventTypeError: + if err, ok := event.Value.(error); ok { + return "", err + } + case EventTypeEnd: + return result, nil + case EventTypeToolCalls: + return result, fmt.Errorf("Tool calls are not supported for read all") + case EventTypeAnnotations: + // Annotations are ignored in ReadAll, continue reading text + continue + } + } + + return result, nil +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/token_tracking.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/token_tracking.go new file mode 100644 index 00000000..5701624e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/token_tracking.go @@ -0,0 +1,133 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package llm + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// TokenUsageLoggingWrapper wraps a LanguageModel to log token usage +type TokenUsageLoggingWrapper struct { + wrapped LanguageModel + botUsername string + tokenLogger *mlog.Logger +} + +// NewTokenUsageLoggingWrapper creates a new wrapper that logs token usage +func NewTokenUsageLoggingWrapper(wrapped LanguageModel, botUsername string, tokenLogger *mlog.Logger) *TokenUsageLoggingWrapper { + return &TokenUsageLoggingWrapper{ + wrapped: wrapped, + botUsername: botUsername, + tokenLogger: tokenLogger, + } +} + +// CreateTokenLogger creates a dedicated logger for token usage metrics +func CreateTokenLogger() (*mlog.Logger, error) { + logger, err := mlog.NewLogger() + if err != nil { + return nil, fmt.Errorf("failed to create token logger: %w", err) + } + + jsonTargetCfg := mlog.TargetCfg{ + Type: "file", + Format: "json", + Levels: []mlog.Level{mlog.LvlInfo, mlog.LvlDebug}, + } + jsonFileOptions := map[string]interface{}{ + "filename": "logs/agents/token_usage.log", + "max_size": 100, // MB + "compress": true, // compress rotated files + } + jsonOptions, err := json.Marshal(jsonFileOptions) + if err != nil { + return nil, fmt.Errorf("failed to marshal json file options: %w", err) + } + jsonTargetCfg.Options = json.RawMessage(jsonOptions) + + err = logger.ConfigureTargets(map[string]mlog.TargetCfg{ + "token_usage": jsonTargetCfg, + }, nil) + if err != nil { + return nil, fmt.Errorf("failed to configure token logger targets: %w", err) + } + + return logger, nil +} + +// ChatCompletion intercepts the streaming response to extract and log token usage +func (w *TokenUsageLoggingWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error) { + result, err := w.wrapped.ChatCompletion(request, opts...) 
+ if err != nil { + return nil, err + } + + if w.tokenLogger == nil { + return nil, errors.New("token logger is nil") + } + + interceptedStream := make(chan TextStreamEvent) + + go func() { + defer close(interceptedStream) + + for event := range result.Stream { + if event.Type != EventTypeUsage { + interceptedStream <- event + continue + } + + usage, ok := event.Value.(TokenUsage) + if !ok { + continue + } + + userID := "unknown" + teamID := "unknown" + if request.Context != nil { + if request.Context.RequestingUser != nil { + userID = request.Context.RequestingUser.Id + } + if request.Context.Team != nil { + teamID = request.Context.Team.Id + } + } + + w.tokenLogger.Info("Token Usage", + mlog.String("user_id", userID), + mlog.String("team_id", teamID), + mlog.String("bot_username", w.botUsername), + mlog.Int("input_tokens", usage.InputTokens), + mlog.Int("output_tokens", usage.OutputTokens), + mlog.Int("total_tokens", usage.InputTokens+usage.OutputTokens), + ) + } + }() + + return &TextStreamResult{Stream: interceptedStream}, nil +} + +// ChatCompletionNoStream uses the streaming method internally, so token usage +// logging happens automatically when ReadAll() processes the intercepted stream +func (w *TokenUsageLoggingWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error) { + result, err := w.ChatCompletion(request, opts...) 
+ if err != nil { + return "", err + } + return result.ReadAll() +} + +// CountTokens delegates to the wrapped model +func (w *TokenUsageLoggingWrapper) CountTokens(text string) int { + return w.wrapped.CountTokens(text) +} + +// InputTokenLimit delegates to the wrapped model +func (w *TokenUsageLoggingWrapper) InputTokenLimit() int { + return w.wrapped.InputTokenLimit() +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/tools.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/tools.go new file mode 100644 index 00000000..0a150380 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/tools.go @@ -0,0 +1,165 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package llm + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/google/jsonschema-go/jsonschema" +) + +// Tool represents a function that can be called by the language model during a conversation. +// +// Each tool has a name, description, and schema that defines its parameters. These are passed to the LLM for it to understand what capabilities it has. +// It is the Resolver function that implements the actual functionality. +// +// The Schema field should contain a JSONSchema that defines the expected structure of the tool's arguments. +// The Resolver function receives the conversation context and a way to access the parsed arguments, +// and returns either a result that will be passed to the LLM or an error. 
+type Tool struct { + Name string + Description string + Schema *jsonschema.Schema + Resolver ToolResolver +} + +type ToolResolver func(context *Context, argsGetter ToolArgumentGetter) (string, error) + +// ToolCallStatus represents the current status of a tool call +type ToolCallStatus int + +const ( + // ToolCallStatusPending indicates the tool is waiting for user approval/rejection + ToolCallStatusPending ToolCallStatus = iota + // ToolCallStatusAccepted indicates the user has accepted the tool call but it's not resolved yet + ToolCallStatusAccepted + // ToolCallStatusRejected indicates the user has rejected the tool call + ToolCallStatusRejected + // ToolCallStatusError indicates the tool call was accepted but errored during resolution + ToolCallStatusError + // ToolCallStatusSuccess indicates the tool call was accepted and resolved successfully + ToolCallStatusSuccess +) + +// ToolCall represents a tool call. An empty result indicates that the tool has not yet been resolved. +type ToolCall struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Arguments json.RawMessage `json:"arguments"` + Result string `json:"result"` + Status ToolCallStatus `json:"status"` +} + +type ToolArgumentGetter func(args any) error + +// ToolAuthError represents an authentication error that occurred during tool creation +type ToolAuthError struct { + ServerName string `json:"server_name"` + AuthURL string `json:"auth_url"` + Error error `json:"error"` +} + +type ToolStore struct { + tools map[string]Tool + log TraceLog + doTrace bool + authErrors []ToolAuthError +} + +type TraceLog interface { + Info(message string, keyValuePairs ...any) +} + +// NewJSONSchemaFromStruct creates a JSONSchema from a Go struct using generics +// It's a helper function for tool providers that currently define schemas as structs +func NewJSONSchemaFromStruct[T any]() *jsonschema.Schema { + schema, err := jsonschema.For[T](nil) + if err != nil { + 
panic(fmt.Sprintf("failed to create JSON schema from struct: %v", err)) + } + + return schema +} + +func NewNoTools() *ToolStore { + return &ToolStore{ + tools: make(map[string]Tool), + log: nil, + doTrace: false, + authErrors: []ToolAuthError{}, + } +} + +func NewToolStore(log TraceLog, doTrace bool) *ToolStore { + return &ToolStore{ + tools: make(map[string]Tool), + log: log, + doTrace: doTrace, + authErrors: []ToolAuthError{}, + } +} + +func (s *ToolStore) AddTools(tools []Tool) { + for _, tool := range tools { + s.tools[tool.Name] = tool + } +} + +func (s *ToolStore) ResolveTool(name string, argsGetter ToolArgumentGetter, context *Context) (string, error) { + tool, ok := s.tools[name] + if !ok { + s.TraceUnknown(name, argsGetter) + return "", errors.New("unknown tool " + name) + } + results, err := tool.Resolver(context, argsGetter) + s.TraceResolved(name, argsGetter, results, err) + return results, err +} + +func (s *ToolStore) GetTools() []Tool { + result := make([]Tool, 0, len(s.tools)) + for _, tool := range s.tools { + result = append(result, tool) + } + return result +} + +func (s *ToolStore) TraceUnknown(name string, argsGetter ToolArgumentGetter) { + if s.log != nil && s.doTrace { + args := "" + var raw json.RawMessage + if err := argsGetter(&raw); err != nil { + args = fmt.Sprintf("failed to get tool args: %v", err) + } else { + args = string(raw) + } + s.log.Info("unknown tool called", "name", name, "args", args) + } +} + +func (s *ToolStore) TraceResolved(name string, argsGetter ToolArgumentGetter, result string, err error) { + if s.log != nil && s.doTrace { + args := "" + var raw json.RawMessage + if getArgsErr := argsGetter(&raw); getArgsErr != nil { + args = fmt.Sprintf("failed to get tool args: %v", getArgsErr) + } else { + args = string(raw) + } + s.log.Info("tool resolved", "name", name, "args", args, "result", result, "error", err) + } +} + +// AddAuthError adds an authentication error to the tool store +func (s *ToolStore) 
AddAuthError(authError ToolAuthError) { + s.authErrors = append(s.authErrors, authError) +} + +// GetAuthErrors returns all authentication errors collected during tool creation +func (s *ToolStore) GetAuthErrors() []ToolAuthError { + return s.authErrors +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/llm/truncation.go b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/truncation.go new file mode 100644 index 00000000..33a32041 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/llm/truncation.go @@ -0,0 +1,42 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package llm + +import ( + "math" +) + +const FunctionsTokenBudget = 200 +const TokenLimitBufferSize = 0.9 +const MinTokens = 100 + +type TruncationWrapper struct { + wrapped LanguageModel +} + +func NewLLMTruncationWrapper(llm LanguageModel) *TruncationWrapper { + return &TruncationWrapper{ + wrapped: llm, + } +} + +func (w *TruncationWrapper) ChatCompletion(request CompletionRequest, opts ...LanguageModelOption) (*TextStreamResult, error) { + tokenLimit := int(math.Max(math.Floor(float64(w.wrapped.InputTokenLimit()-FunctionsTokenBudget)*TokenLimitBufferSize), MinTokens)) + request.Truncate(tokenLimit, w.wrapped.CountTokens) + return w.wrapped.ChatCompletion(request, opts...) +} + +func (w *TruncationWrapper) ChatCompletionNoStream(request CompletionRequest, opts ...LanguageModelOption) (string, error) { + tokenLimit := int(math.Max(math.Floor(float64(w.wrapped.InputTokenLimit()-FunctionsTokenBudget)*TokenLimitBufferSize), MinTokens)) + request.Truncate(tokenLimit, w.wrapped.CountTokens) + return w.wrapped.ChatCompletionNoStream(request, opts...) 
+} + +func (w *TruncationWrapper) CountTokens(text string) int { + return w.wrapped.CountTokens(text) +} + +func (w *TruncationWrapper) InputTokenLimit() int { + return w.wrapped.InputTokenLimit() +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/README.md b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/README.md new file mode 100644 index 00000000..14c0318d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/README.md @@ -0,0 +1,188 @@ +# Mattermost AI Plugin - LLM Bridge Client + +Go client library for Mattermost plugins and the server to interact with the AI plugin's LLM Bridge API. + +## Quick Start + +### From a Plugin + +```go +import "github.com/mattermost/mattermost-plugin-ai/public/bridgeclient" + +type MyPlugin struct { + plugin.MattermostPlugin + llmClient *bridgeclient.Client +} + +func (p *MyPlugin) OnActivate() error { + p.llmClient = bridgeclient.NewClient(p.API) + return nil +} + +func (p *MyPlugin) handleCommand() { + // Get the bot ID first (e.g., from discovery or configuration) + botID := "bot-user-id-here" + response, err := p.llmClient.AgentCompletion(botID, bridgeclient.CompletionRequest{ + Posts: []bridgeclient.Post{ + {Role: "user", Message: "What is the capital of France?"}, + }, + }) + // Handle response... +} +``` + +### From Mattermost Server + +```go +import "github.com/mattermost/mattermost-plugin-ai/public/bridgeclient" + +type MyService struct { + app *app.App + llmClient *bridgeclient.Client +} + +func NewMyService(app *app.App, userID string) *MyService { + return &MyService{ + app: app, + llmClient: bridgeclient.NewClientFromApp(app, userID), + } +} + +func (s *MyService) process() { + response, err := s.llmClient.ServiceCompletion("anthropic", bridgeclient.CompletionRequest{ + Posts: []bridgeclient.Post{ + {Role: "user", Message: "Write a haiku"}, + }, + }) + // Handle response... 
+} +``` + +## API Methods + +### Non-Streaming + +```go +// Request by agent Bot ID +response, err := client.AgentCompletion("bot-user-id", request) + +// Request by service name +response, err := client.ServiceCompletion("openai", request) +``` + +### Streaming + +```go +import "github.com/mattermost/mattermost-plugin-ai/llm" + +// Start streaming request (using Bot ID) +result, err := client.AgentCompletionStream("bot-user-id", request) +if err != nil { + return err +} + +// Process events +for event := range result.Stream { + switch event.Type { + case llm.EventTypeText: + fmt.Print(event.Value.(string)) + case llm.EventTypeError: + return event.Value.(error) + case llm.EventTypeEnd: + return nil + } +} +``` + +### Multi-turn Conversations + +```go +request := bridgeclient.CompletionRequest{ + Posts: []bridgeclient.Post{ + {Role: "system", Message: "You are a helpful assistant"}, + {Role: "user", Message: "What is AI?"}, + {Role: "assistant", Message: "AI stands for..."}, + {Role: "user", Message: "Can you give examples?"}, + }, +} +``` + +## Permission Checking + +By default, the bridge does not check permissions. To enable permission checking, include `UserID` and optionally `ChannelID` in your request: + +```go +request := bridgeclient.CompletionRequest{ + Posts: []bridgeclient.Post{ + {Role: "user", Message: "Hello"}, + }, + UserID: userID, // Checks user-level permissions + ChannelID: channelID, // Also checks channel-level permissions +} + +// Returns 403 Forbidden if user lacks permission +response, err := client.AgentCompletion("bot-user-id", request) +``` + +If not using built-in permission checks, your plugin must verify permissions before making requests. 
+ +## Agent vs Service + +- **Agent**: Target a specific bot by its Bot ID (the immutable Mattermost Bot User ID) + - Uses bot's custom configuration, tools, and prompts + - Get bot IDs via the `GetAgents()` discovery endpoint + +- **Service**: Target an LLM service by ID or name (e.g., "openai", "anthropic") + - Uses any bot configured with that service + - Useful when bot-specific configuration doesn't matter + +## Discovery Endpoints + +The bridge API provides discovery endpoints to help clients find available agents and services before making completion requests. + +### Get Available Agents + +```go +// Get all agents +agents, err := client.GetAgents("") +if err != nil { + return err +} + +for _, agent := range agents { + fmt.Printf("Agent: %s (ID: %s, Username: %s) - Service: %s (%s)\n", + agent.DisplayName, agent.ID, agent.Username, agent.ServiceID, agent.ServiceType) + + // Use agent.ID when making completion requests + // response, err := client.AgentCompletion(agent.ID, request) +} +``` + +### Get Available Services + +```go +// Get all services +services, err := client.GetServices("") +if err != nil { + return err +} + +for _, service := range services { + fmt.Printf("Service: %s (%s) - Type: %s\n", + service.Name, service.ID, service.Type) +} +``` + +### Discovery with User Permissions + +Like completion endpoints, discovery endpoints support optional user filtering: + +```go +// Get agents accessible to a specific user +agents, err := client.GetAgents(userID) + +// Get services accessible to a specific user (via their permitted agents) +services, err := client.GetServices(userID) +``` + +This is useful for showing users only the agents and services they have permission to use. 
diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/client.go b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/client.go new file mode 100644 index 00000000..15dcc8d2 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/client.go @@ -0,0 +1,101 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// Package bridgeclient provides a client library for Mattermost plugins and the server +// to interact with the AI plugin's LLM Bridge API to make requests to Agents to LLM providers. +package bridgeclient + +import ( + "net/http" +) + +const ( + aiPluginID = "mattermost-ai" + mattermostServerID = "mattermost-server" +) + +// PluginAPI is the minimal interface needed from the Mattermost plugin API +type PluginAPI interface { + PluginHTTP(*http.Request) *http.Response +} + +// AppAPI is the minimal interface needed from the Mattermost app layer +type AppAPI interface { + ServeInternalPluginRequest(userID string, w http.ResponseWriter, r *http.Request, sourcePluginID, destinationPluginID string) +} + +// Client is a client for the Mattermost Agents Plugin LLM Bridge API +type Client struct { + httpClient http.Client +} + +// Post represents a single message in the conversation +type Post struct { + Role string `json:"role"` // user|assistant|system + Message string `json:"message"` // message content + FileIDs []string `json:"file_ids,omitempty"` // Mattermost file IDs +} + +// CompletionRequest represents a completion request +type CompletionRequest struct { + Posts []Post `json:"posts"` + MaxGeneratedTokens int `json:"max_generated_tokens,omitempty"` + JSONOutputFormat map[string]interface{} `json:"json_output_format,omitempty"` + // UserID is the optional Mattermost user ID making the request. + // If provided, the bridge will check user-level permissions. 
+ UserID string `json:"user_id,omitempty"` + // ChannelID is the optional Mattermost channel ID context for the request. + // If provided along with UserID, the bridge will check both user and channel permissions. + ChannelID string `json:"channel_id,omitempty"` +} + +// CompletionResponse represents a non-streaming completion response +type CompletionResponse struct { + Completion string `json:"completion"` +} + +// ErrorResponse represents an error response from the API +type ErrorResponse struct { + Error string `json:"error"` +} + +// BridgeAgentInfo represents basic agent information from the bridge API +type BridgeAgentInfo struct { + ID string `json:"id"` + DisplayName string `json:"displayName"` + Username string `json:"username"` + ServiceID string `json:"service_id"` + ServiceType string `json:"service_type"` +} + +// BridgeServiceInfo represents basic service information from the bridge API +type BridgeServiceInfo struct { + ID string `json:"id"` + Name string `json:"name"` + Type string `json:"type"` +} + +// AgentsResponse represents the response for the agents endpoint +type AgentsResponse struct { + Agents []BridgeAgentInfo `json:"agents"` +} + +// ServicesResponse represents the response for the services endpoint +type ServicesResponse struct { + Services []BridgeServiceInfo `json:"services"` +} + +// NewClient creates a new LLM Bridge API client from a plugin's API interface. +func NewClient(api PluginAPI) *Client { + client := &Client{} + client.httpClient.Transport = &pluginAPIRoundTripper{api} + return client +} + +// NewClientFromApp creates a new LLM Bridge API client from the Mattermost server app layer. +// The userID is used for inter-plugin request authentication. 
+func NewClientFromApp(api AppAPI, userID string) *Client { + client := &Client{} + client.httpClient.Transport = &appAPIRoundTripper{api, userID} + return client +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/completion.go b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/completion.go new file mode 100644 index 00000000..00d89363 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/completion.go @@ -0,0 +1,200 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package bridgeclient + +import ( + "bufio" + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/mattermost/mattermost-plugin-ai/llm" +) + +// AgentCompletion makes a non-streaming completion request to a specific agent by Bot ID. +// The agent parameter should be the Mattermost Bot User ID (an immutable identifier). +func (c *Client) AgentCompletion(agent string, request CompletionRequest) (string, error) { + url := fmt.Sprintf("/%s/bridge/v1/completion/agent/%s/nostream", aiPluginID, agent) + return c.doCompletionRequest(url, request) +} + +// ServiceCompletion makes a non-streaming completion request to a specific service. +// The service parameter can be either a service ID or name (e.g., "openai", "anthropic"). +func (c *Client) ServiceCompletion(service string, request CompletionRequest) (string, error) { + url := fmt.Sprintf("/%s/bridge/v1/completion/service/%s/nostream", aiPluginID, service) + return c.doCompletionRequest(url, request) +} + +// AgentCompletionStream makes a streaming completion request to a specific agent by Bot ID. +// The agent parameter should be the Mattermost Bot User ID (an immutable identifier). +// Returns a TextStreamResult with a Stream channel for processing events. 
+func (c *Client) AgentCompletionStream(agent string, request CompletionRequest) (*llm.TextStreamResult, error) { + url := fmt.Sprintf("/%s/bridge/v1/completion/agent/%s", aiPluginID, agent) + return c.doStreamingRequest(url, request) +} + +// ServiceCompletionStream makes a streaming completion request to a specific service. +// The service parameter can be either a service ID or name (e.g., "openai", "anthropic"). +// Returns a TextStreamResult with a Stream channel for processing events. +func (c *Client) ServiceCompletionStream(service string, request CompletionRequest) (*llm.TextStreamResult, error) { + url := fmt.Sprintf("/%s/bridge/v1/completion/service/%s", aiPluginID, service) + return c.doStreamingRequest(url, request) +} + +// doCompletionRequest performs a non-streaming completion request +func (c *Client) doCompletionRequest(url string, request CompletionRequest) (string, error) { + // Marshal the request body + body, err := json.Marshal(request) + if err != nil { + return "", fmt.Errorf("failed to marshal request: %w", err) + } + + // Create the HTTP request + req, err := http.NewRequest("POST", url, bytes.NewReader(body)) + if err != nil { + return "", fmt.Errorf("failed to create request: %w", err) + } + + // Set headers + req.Header.Set("Content-Type", "application/json") + + // Make the request + resp, err := c.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("failed to execute request: %w", err) + } + defer resp.Body.Close() + + // Read the response body + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return "", fmt.Errorf("failed to read response body: %w", err) + } + + // Check for error status codes + if resp.StatusCode != http.StatusOK { + var errResp ErrorResponse + if err := json.Unmarshal(respBody, &errResp); err != nil { + return "", fmt.Errorf("request failed with status %d: %s", resp.StatusCode, string(respBody)) + } + return "", fmt.Errorf("request failed with status %d: %s", resp.StatusCode, errResp.Error) + } 
+ + // Parse the success response + var completionResp CompletionResponse + if err := json.Unmarshal(respBody, &completionResp); err != nil { + return "", fmt.Errorf("failed to unmarshal response: %w", err) + } + + return completionResp.Completion, nil +} + +// doStreamingRequest performs a streaming completion request and returns a TextStreamResult +func (c *Client) doStreamingRequest(url string, request CompletionRequest) (*llm.TextStreamResult, error) { + // Marshal the request body + body, err := json.Marshal(request) + if err != nil { + return nil, fmt.Errorf("failed to marshal request: %w", err) + } + + // Create the HTTP request + req, err := http.NewRequest("POST", url, bytes.NewReader(body)) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + // Set headers + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "text/event-stream") + + // Make the request + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute request: %w", err) + } + + // Ensure body is closed in all paths + bodyClosed := false + defer func() { + if !bodyClosed { + resp.Body.Close() + } + }() + + // Check for error status codes + if resp.StatusCode != http.StatusOK { + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("request failed with status %d", resp.StatusCode) + } + var errResp ErrorResponse + if err := json.Unmarshal(respBody, &errResp); err != nil { + return nil, fmt.Errorf("request failed with status %d: %s", resp.StatusCode, string(respBody)) + } + return nil, fmt.Errorf("request failed with status %d: %s", resp.StatusCode, errResp.Error) + } + + // Create a channel for the stream + stream := make(chan llm.TextStreamEvent) + + // Start a goroutine to read the SSE stream and populate the channel + go func() { + defer resp.Body.Close() + defer close(stream) + + scanner := bufio.NewScanner(resp.Body) + for scanner.Scan() { + line := scanner.Text() 
+ + // SSE lines start with "data: " + if !strings.HasPrefix(line, "data: ") { + continue + } + + // Extract the data portion + data := strings.TrimPrefix(line, "data: ") + + // Check for empty data lines + if data == "" { + continue + } + + // Parse the JSON event + var event llm.TextStreamEvent + if err := json.Unmarshal([]byte(data), &event); err != nil { + // Send an error event + stream <- llm.TextStreamEvent{ + Type: llm.EventTypeError, + Value: fmt.Errorf("error parsing stream event: %w", err), + } + return + } + + // Send the event to the channel + stream <- event + + // If this is an end or error event, stop reading + if event.Type == llm.EventTypeEnd || event.Type == llm.EventTypeError { + return + } + } + + if err := scanner.Err(); err != nil { + stream <- llm.TextStreamEvent{ + Type: llm.EventTypeError, + Value: fmt.Errorf("error reading stream: %w", err), + } + } + }() + + // Mark body as handled by goroutine so defer doesn't close it + bodyClosed = true + + return &llm.TextStreamResult{ + Stream: stream, + }, nil +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/discovery.go b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/discovery.go new file mode 100644 index 00000000..7657130b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/discovery.go @@ -0,0 +1,91 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package bridgeclient + +import ( + "encoding/json" + "fmt" + "io" + "net/http" +) + +// GetAgents retrieves all available agents from the bridge API. +// If userID is provided, only agents accessible to that user are returned. 
+func (c *Client) GetAgents(userID string) ([]BridgeAgentInfo, error) { + url := fmt.Sprintf("/%s/bridge/v1/agents", aiPluginID) + if userID != "" { + url = fmt.Sprintf("%s?user_id=%s", url, userID) + } + + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute request: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %w", err) + } + + if resp.StatusCode != http.StatusOK { + var errResp ErrorResponse + if err := json.Unmarshal(respBody, &errResp); err != nil { + return nil, fmt.Errorf("request failed with status %d: %s", resp.StatusCode, string(respBody)) + } + return nil, fmt.Errorf("request failed with status %d: %s", resp.StatusCode, errResp.Error) + } + + var agentsResp AgentsResponse + if err := json.Unmarshal(respBody, &agentsResp); err != nil { + return nil, fmt.Errorf("failed to unmarshal response: %w", err) + } + + return agentsResp.Agents, nil +} + +// GetServices retrieves all available services from the bridge API. +// If userID is provided, only services accessible to that user (via their permitted bots) are returned. 
+func (c *Client) GetServices(userID string) ([]BridgeServiceInfo, error) { + url := fmt.Sprintf("/%s/bridge/v1/services", aiPluginID) + if userID != "" { + url = fmt.Sprintf("%s?user_id=%s", url, userID) + } + + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute request: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %w", err) + } + + if resp.StatusCode != http.StatusOK { + var errResp ErrorResponse + if err := json.Unmarshal(respBody, &errResp); err != nil { + return nil, fmt.Errorf("request failed with status %d: %s", resp.StatusCode, string(respBody)) + } + return nil, fmt.Errorf("request failed with status %d: %s", resp.StatusCode, errResp.Error) + } + + var servicesResp ServicesResponse + if err := json.Unmarshal(respBody, &servicesResp); err != nil { + return nil, fmt.Errorf("failed to unmarshal response: %w", err) + } + + return servicesResp.Services, nil +} diff --git a/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/transport.go b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/transport.go new file mode 100644 index 00000000..3264720d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost-plugin-ai/public/bridgeclient/transport.go @@ -0,0 +1,60 @@ +// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package bridgeclient + +import ( + "net/http" + "net/http/httptest" + "strings" + + "github.com/pkg/errors" +) + +// pluginAPIRoundTripper wraps the Mattermost plugin API for HTTP requests +type pluginAPIRoundTripper struct { + api PluginAPI +} + +func (p *pluginAPIRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + resp := p.api.PluginHTTP(req) + if resp == nil { + return nil, errors.Errorf("failed to make interplugin request") + } + return resp, nil +} + +// appAPIRoundTripper wraps the Mattermost app layer API for HTTP requests +type appAPIRoundTripper struct { + api AppAPI + userID string +} + +func removeFirstPath(r *http.Request) { + path := r.URL.Path + + // Find the position of the second slash (first slash after the leading one) + secondSlash := strings.Index(path[1:], "/") + + if secondSlash == -1 { + // No second slash found, set to just "/" + r.URL.Path = "/" + return + } + + // Update the path to everything from the second slash onwards + r.URL.Path = path[1+secondSlash:] +} + +func (a *appAPIRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + // Create a response recorder to capture the response + recorder := httptest.NewRecorder() + + removeFirstPath(req) + + // Make the inter-plugin request from the server to the AI plugin + a.api.ServeInternalPluginRequest(a.userID, recorder, req, mattermostServerID, aiPluginID) + + // Convert the recorder to an http.Response + return recorder.Result(), nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/LICENSE.txt b/vendor/github.com/mattermost/mattermost/server/public/LICENSE.txt new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/access.go b/vendor/github.com/mattermost/mattermost/server/public/model/access.go new file mode 100644 index 00000000..167e4fb4 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/access.go @@ -0,0 +1,79 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" +) + +const ( + AccessTokenGrantType = "authorization_code" + AccessTokenType = "bearer" + RefreshTokenGrantType = "refresh_token" +) + +type AccessData struct { + ClientId string `json:"client_id"` + UserId string `json:"user_id"` + Token string `json:"token"` + RefreshToken string `json:"refresh_token"` + RedirectUri string `json:"redirect_uri"` + ExpiresAt int64 `json:"expires_at"` + Scope string `json:"scope"` + Audience string `json:"audience"` +} + +type AccessResponse struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + ExpiresInSeconds int32 `json:"expires_in"` + Scope string `json:"scope"` + RefreshToken string `json:"refresh_token"` + IdToken string `json:"id_token"` + Audience string `json:"audience,omitempty"` +} + +// IsValid validates the AccessData and returns an error if it isn't configured +// correctly. +func (ad *AccessData) IsValid() *AppError { + if ad.ClientId == "" || len(ad.ClientId) > 26 { + return NewAppError("AccessData.IsValid", "model.access.is_valid.client_id.app_error", nil, "", http.StatusBadRequest) + } + + if ad.UserId == "" || len(ad.UserId) > 26 { + return NewAppError("AccessData.IsValid", "model.access.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if len(ad.Token) != 26 { + return NewAppError("AccessData.IsValid", "model.access.is_valid.access_token.app_error", nil, "", http.StatusBadRequest) + } + + if len(ad.RefreshToken) > 26 { + return NewAppError("AccessData.IsValid", "model.access.is_valid.refresh_token.app_error", nil, "", http.StatusBadRequest) + } + + if ad.RedirectUri == "" || len(ad.RedirectUri) > 256 || !IsValidHTTPURL(ad.RedirectUri) { + return NewAppError("AccessData.IsValid", "model.access.is_valid.redirect_uri.app_error", nil, "", http.StatusBadRequest) + } + + if ad.Audience != "" { + if err := ValidateResourceParameter(ad.Audience, ad.ClientId, "AccessData.IsValid"); err != nil { + return err + } + } + + 
return nil +} + +func (ad *AccessData) IsExpired() bool { + if ad.ExpiresAt <= 0 { + return false + } + + if GetMillis() > ad.ExpiresAt { + return true + } + + return false +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/access_policy.go b/vendor/github.com/mattermost/mattermost/server/public/model/access_policy.go new file mode 100644 index 00000000..abce3383 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/access_policy.go @@ -0,0 +1,243 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "slices" + + "github.com/pkg/errors" + "golang.org/x/mod/semver" +) + +const ( + AccessControlPolicyTypeParent = "parent" + AccessControlPolicyTypeChannel = "channel" + + MaxPolicyNameLength = 128 + + AccessControlPolicyVersionV0_1 = "v0.1" + AccessControlPolicyVersionV0_2 = "v0.2" +) + +// AccessControlAttribute represents a user attribute with its name and possible values +type AccessControlAttribute struct { + Attribute PropertyField `json:"attribute"` + Values []string `json:"values"` +} + +type AccessControlPolicyTestResponse struct { + Users []*User `json:"users"` + Total int64 `json:"total"` +} + +type GetAccessControlPolicyOptions struct { + Type string `json:"type"` + ParentID string `json:"parent_id"` + Cursor AccessControlPolicyCursor `json:"cursor"` + Limit int `json:"limit"` +} + +type AccessControlPolicySearch struct { + Term string `json:"term"` + Type string `json:"type"` + ParentID string `json:"parent_id"` + IDs []string `json:"ids"` + Cursor AccessControlPolicyCursor `json:"cursor"` + Limit int `json:"limit"` + IncludeChildren bool `json:"include_children"` + Active bool `json:"active"` +} + +type AccessControlPolicyCursor struct { + ID string `json:"id"` +} + +type AccessControlPoliciesWithCount struct { + Policies []*AccessControlPolicy `json:"policies"` + Total int64 `json:"total"` +} + +type 
AccessControlPolicy struct { + ID string `json:"id"` + Name string `json:"name"` + Type string `json:"type"` + Active bool `json:"active"` + CreateAt int64 `json:"create_at"` + + Revision int `json:"revision"` + Version string `json:"version"` + + Imports []string `json:"imports"` + Rules []AccessControlPolicyRule `json:"rules"` + + Props map[string]any `json:"props"` // add auto-sync property here, also maybe the attributes being used in the expression +} + +type AccessControlPolicyRule struct { + Actions []string `json:"actions"` + Expression string `json:"expression"` +} + +type CELExpressionError struct { + Line int `json:"line"` + Column int `json:"column"` + Message string `json:"message"` +} + +type AccessControlQueryResult struct { + MatchedSubjectIDs []string `json:"matched_subject_ids"` +} + +func (p *AccessControlPolicy) IsValid() *AppError { + switch p.Version { + case AccessControlPolicyVersionV0_1: + return p.accessPolicyVersionV0_1() + case AccessControlPolicyVersionV0_2: + return p.accessPolicyVersionV0_2() + default: + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.version.app_error", nil, "", 400) + } +} + +func (p *AccessControlPolicy) accessPolicyVersionV0_1() *AppError { + if !slices.Contains([]string{AccessControlPolicyTypeParent, AccessControlPolicyTypeChannel}, p.Type) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.type.app_error", nil, "", 400) + } + + if !IsValidId(p.ID) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.id.app_error", nil, "", 400) + } + + if p.Type == AccessControlPolicyTypeParent && (p.Name == "" || len(p.Name) > MaxPolicyNameLength) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.name.app_error", nil, "", 400) + } + + if p.Revision < 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.revision.app_error", nil, "", 400) + } + + if 
!semver.IsValid(p.Version) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.version.app_error", nil, "", 400) + } + + switch p.Type { + case AccessControlPolicyTypeParent: + if len(p.Rules) == 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.rules.app_error", nil, "", 400) + } + + if len(p.Imports) > 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.imports.app_error", nil, "", 400) + } + case AccessControlPolicyTypeChannel: + if len(p.Rules) == 0 && len(p.Imports) == 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.rules_imports.app_error", nil, "", 400) + } + + if len(p.Rules) == 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.rules.app_error", nil, "", 400) + } + + if len(p.Imports) > 1 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.imports.app_error", nil, "", 400) + } + } + + return nil +} + +func (p *AccessControlPolicy) accessPolicyVersionV0_2() *AppError { + if !slices.Contains([]string{AccessControlPolicyTypeParent, AccessControlPolicyTypeChannel}, p.Type) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.type.app_error", nil, "", 400) + } + + if !IsValidId(p.ID) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.id.app_error", nil, "", 400) + } + + if p.Type == AccessControlPolicyTypeParent && (p.Name == "" || len(p.Name) > MaxPolicyNameLength) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.name.app_error", nil, "", 400) + } + + if p.Revision < 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.revision.app_error", nil, "", 400) + } + + if !semver.IsValid(p.Version) { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.version.app_error", nil, "", 400) + } + + switch p.Type 
{ + case AccessControlPolicyTypeParent: + if len(p.Rules) == 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.rules.app_error", nil, "", 400) + } + + if len(p.Imports) > 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.imports.app_error", nil, "", 400) + } + case AccessControlPolicyTypeChannel: + if len(p.Rules) == 0 && len(p.Imports) == 0 { + return NewAppError("AccessControlPolicy.IsValid", "model.access_policy.is_valid.rules_imports.app_error", nil, "", 400) + } + } + + return nil +} + +func (p *AccessControlPolicy) Inherit(parent *AccessControlPolicy) *AppError { + rules := make([]AccessControlPolicyRule, len(p.Rules)) + + switch p.Version { + case AccessControlPolicyVersionV0_1: + p.Imports = []string{parent.ID} + for i, rule := range p.Rules { + actions := make([]string, len(rule.Actions)) + copy(actions, rule.Actions) + rules[i] = AccessControlPolicyRule{ + Actions: actions, + Expression: fmt.Sprintf("policies.id_%s", p.ID), + } + } + case AccessControlPolicyVersionV0_2: + if slices.Contains(p.Imports, parent.ID) { + return NewAppError("AccessControlPolicy.Inherit", "model.access_policy.inherit.already_imported.app_error", nil, "", 400) + } + p.Imports = append(p.Imports, parent.ID) + + default: + return NewAppError("AccessControlPolicy.Inherit", "model.access_policy.inherit.version.app_error", nil, "", 400) + } + + if appErr := p.IsValid(); appErr != nil { + return appErr + } + + return nil +} + +func (c *AccessControlPolicyCursor) IsEmpty() bool { + return c.ID == "" +} + +func (c *AccessControlPolicyCursor) IsValid() error { + if c.IsEmpty() { + return nil + } + + if !IsValidId(c.ID) { + return errors.New("cursor id is invalid") + } + + return nil +} + +func (p *AccessControlPolicy) Auditable() map[string]any { + return map[string]any{ + "id": p.ID, + "type": p.Type, + "revision": p.Revision, + } +} diff --git 
a/vendor/github.com/mattermost/mattermost/server/public/model/access_request.go b/vendor/github.com/mattermost/mattermost/server/public/model/access_request.go new file mode 100644 index 00000000..74dd7fed --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/access_request.go @@ -0,0 +1,75 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// Subject represents the user or a virtual entity for which the Authorization +// API is called. +type Subject struct { + // ID is the unique identifier of the Subject. + // it can be a user ID, bot ID, etc and it is scoped to the Type. + ID string `json:"id"` + // Type specifies the type of the Subject, eg. user, bot, etc. + Type string `json:"type"` + // Attributes are the key-value pairs associated with the subject. + // An attribute may be single-valued or multi-valued and can be a primitive type + // (string, boolean, number) or a complex type like a JSON object or array. 
+ Attributes map[string]any `json:"attributes"` +} + +type SubjectSearchOptions struct { + Term string `json:"term"` + TeamID string `json:"team_id"` + // Query and Args should be generated within the Access Control Service + // and passed here wrt database driver + Query string `json:"query"` + Args []any `json:"args"` + Limit int `json:"limit"` + Cursor SubjectCursor `json:"cursor"` + AllowInactive bool `json:"allow_inactive"` + IgnoreCount bool `json:"ignore_count"` + // ExcludeChannelMembers is used to exclude members from the search results + // specifically used when syncing channel members + ExcludeChannelMembers string `json:"exclude_members"` + // SubjectID is used to filter search results to a specific user ID + // This is particularly useful for validation queries where we only need to check + // if a specific user matches an expression, rather than fetching all matching users + SubjectID string `json:"subject_id"` +} + +type SubjectCursor struct { + TargetID string `json:"target_id"` +} + +// Resource is the target of an access request. +type Resource struct { + // ID is the unique identifier of the Resource. + // It can be a channel ID, post ID, etc and it is scoped to the Type. + ID string `json:"id"` + // Type specifies the type of the Resource, eg. channel, post, etc. + Type string `json:"type"` +} + +// AccessRequest represents the input to the Policy Decision Point (PDP). +// It contains the Subject, Resource, Action and optional Context attributes. +type AccessRequest struct { + Subject Subject `json:"subject"` + Resource Resource `json:"resource"` + Action string `json:"action"` + Context map[string]any `json:"context,omitempty"` +} + +// The PDP evaluates the request and returns an AccessDecision. +// The Decision field is a boolean indicating whether the request is allowed or not. 
+type AccessDecision struct { + Decision bool `json:"decision"` + Context map[string]any `json:"context,omitempty"` +} + +type QueryExpressionParams struct { + Expression string `json:"expression"` + Term string `json:"term"` + Limit int `json:"limit"` + After string `json:"after"` + ChannelId string `json:"channelId,omitempty"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/analytics_row.go b/vendor/github.com/mattermost/mattermost/server/public/model/analytics_row.go new file mode 100644 index 00000000..72ba3a09 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/analytics_row.go @@ -0,0 +1,11 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type AnalyticsRow struct { + Name string `json:"name"` + Value float64 `json:"value"` +} + +type AnalyticsRows []*AnalyticsRow diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/audit.go b/vendor/github.com/mattermost/mattermost/server/public/model/audit.go new file mode 100644 index 00000000..3e8345c7 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/audit.go @@ -0,0 +1,14 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type Audit struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UserId string `json:"user_id"` + Action string `json:"action"` + ExtraInfo string `json:"extra_info"` + IpAddress string `json:"ip_address"` + SessionId string `json:"session_id"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/audit_events.go b/vendor/github.com/mattermost/mattermost/server/public/model/audit_events.go new file mode 100644 index 00000000..2d6bb55d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/audit_events.go @@ -0,0 +1,440 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// Access Control & Security +const ( + AuditEventApplyIPFilters = "applyIPFilters" // apply IP address filtering + AuditEventAssignAccessPolicy = "assignAccessPolicy" // assign access control policy to channels + AuditEventCreateAccessControlPolicy = "createAccessControlPolicy" // create access control policy + AuditEventDeleteAccessControlPolicy = "deleteAccessControlPolicy" // delete access control policy + AuditEventUnassignAccessPolicy = "unassignAccessPolicy" // remove access control policy from channels + AuditEventUpdateActiveStatus = "updateActiveStatus" // update active/inactive status of access control policy +) + +// Audit & Certificates +const ( + AuditEventAddAuditLogCertificate = "addAuditLogCertificate" // add certificate for secure audit log transmission + AuditEventGetAudits = "getAudits" // get audit log entries + AuditEventGetUserAudits = "getUserAudits" // get audit log entries for specific user + AuditEventRemoveAuditLogCertificate = "removeAuditLogCertificate" // remove certificate used for audit log transmission +) + +// Bots +const ( + AuditEventAssignBot = "assignBot" // assign bot to user + AuditEventConvertBotToUser = "convertBotToUser" // convert bot account to regular user account + 
AuditEventConvertUserToBot = "convertUserToBot" // convert regular user account to bot account + AuditEventCreateBot = "createBot" // create bot account + AuditEventPatchBot = "patchBot" // update bot properties + AuditEventUpdateBotActive = "updateBotActive" // enable or disable bot account +) + +// Branding +const ( + AuditEventDeleteBrandImage = "deleteBrandImage" // delete brand image + AuditEventUploadBrandImage = "uploadBrandImage" // upload brand image +) + +// Channel Bookmarks +const ( + AuditEventCreateChannelBookmark = "createChannelBookmark" // create bookmark in channels + AuditEventDeleteChannelBookmark = "deleteChannelBookmark" // delete bookmark + AuditEventUpdateChannelBookmark = "updateChannelBookmark" // update bookmark + AuditEventUpdateChannelBookmarkSortOrder = "updateChannelBookmarkSortOrder" // update display order of bookmarks +) + +// Channel Categories +const ( + AuditEventCreateCategoryForTeamForUser = "createCategoryForTeamForUser" // create channel category for user + AuditEventDeleteCategoryForTeamForUser = "deleteCategoryForTeamForUser" // delete channel category + AuditEventUpdateCategoriesForTeamForUser = "updateCategoriesForTeamForUser" // update multiple channel categories + AuditEventUpdateCategoryForTeamForUser = "updateCategoryForTeamForUser" // update single channel category + AuditEventUpdateCategoryOrderForTeamForUser = "updateCategoryOrderForTeamForUser" // update display order of the categories +) + +// Channels +const ( + AuditEventAddChannelMember = "addChannelMember" // add member to channel + AuditEventConvertGroupMessageToChannel = "convertGroupMessageToChannel" // convert group message to private channel + AuditEventCreateChannel = "createChannel" // create public or private channel + AuditEventCreateDirectChannel = "createDirectChannel" // create direct message channel between two users + AuditEventCreateGroupChannel = "createGroupChannel" // create group message channel with multiple users + 
AuditEventDeleteChannel = "deleteChannel" // delete channel + AuditEventLocalAddChannelMember = "localAddChannelMember" // add channel member locally + AuditEventLocalCreateChannel = "localCreateChannel" // create channel locally + AuditEventLocalDeleteChannel = "localDeleteChannel" // delete channel locally + AuditEventLocalMoveChannel = "localMoveChannel" // move channel locally + AuditEventLocalPatchChannel = "localPatchChannel" // patch channel locally + AuditEventLocalRemoveChannelMember = "localRemoveChannelMember" // remove channel member locally + AuditEventLocalRestoreChannel = "localRestoreChannel" // restore channel locally + AuditEventLocalUpdateChannelPrivacy = "localUpdateChannelPrivacy" // update channel privacy locally + AuditEventMoveChannel = "moveChannel" // move channel to different team + AuditEventPatchChannel = "patchChannel" // update channel properties + AuditEventPatchChannelModerations = "patchChannelModerations" // update channel moderation settings + AuditEventRemoveChannelMember = "removeChannelMember" // remove member from channel + AuditEventRestoreChannel = "restoreChannel" // restore previously deleted channel + AuditEventUpdateChannel = "updateChannel" // update channel properties + AuditEventUpdateChannelMemberNotifyProps = "updateChannelMemberNotifyProps" // update notification preferences + AuditEventUpdateChannelMemberRoles = "updateChannelMemberRoles" // update roles and permissions + AuditEventUpdateChannelMemberSchemeRoles = "updateChannelMemberSchemeRoles" // update scheme-based roles + AuditEventUpdateChannelPrivacy = "updateChannelPrivacy" // change channel privacy settings + AuditEventUpdateChannelScheme = "updateChannelScheme" // update permission scheme applied to channel +) + +// Commands +const ( + AuditEventCreateCommand = "createCommand" // create slash command + AuditEventDeleteCommand = "deleteCommand" // delete command + AuditEventExecuteCommand = "executeCommand" // execute command + 
AuditEventLocalCreateCommand = "localCreateCommand" // create command locally + AuditEventMoveCommand = "moveCommand" // move command to another team + AuditEventRegenCommandToken = "regenCommandToken" // regenerate authentication token for command + AuditEventUpdateCommand = "updateCommand" // update command +) + +// Compliance +const ( + AuditEventCreateComplianceReport = "createComplianceReport" // create compliance report + AuditEventDownloadComplianceReport = "downloadComplianceReport" // download compliance report + AuditEventGetComplianceReport = "getComplianceReport" // get specific compliance report + AuditEventGetComplianceReports = "getComplianceReports" // get all compliance reports +) + +// Configuration +const ( + AuditEventConfigReload = "configReload" // reload server configuration + AuditEventGetConfig = "getConfig" // get current server configuration + AuditEventLocalGetClientConfig = "localGetClientConfig" // get client configuration locally + AuditEventLocalGetConfig = "localGetConfig" // get server configuration locally + AuditEventLocalPatchConfig = "localPatchConfig" // update server configuration locally + AuditEventLocalUpdateConfig = "localUpdateConfig" // update server configuration locally + AuditEventMigrateConfig = "migrateConfig" // migrate configs with file values from one store to another + AuditEventPatchConfig = "patchConfig" // update server configuration + AuditEventUpdateConfig = "updateConfig" // update server configuration +) + +// Custom Profile Attributes +const ( + AuditEventCreateCPAField = "createCPAField" // create custom profile attribute + AuditEventDeleteCPAField = "deleteCPAField" // delete custom profile attribute + AuditEventPatchCPAField = "patchCPAField" // update custom profile attribute field + AuditEventPatchCPAValues = "patchCPAValues" // update custom profile attribute values +) + +// Data Retention Policies +const ( + AuditEventAddChannelsToPolicy = "addChannelsToPolicy" // add channels to data retention 
policy + AuditEventAddTeamsToPolicy = "addTeamsToPolicy" // add teams to data retention policy + AuditEventCreatePolicy = "createPolicy" // create data retention policy + AuditEventDeletePolicy = "deletePolicy" // delete data retention policy + AuditEventPatchPolicy = "patchPolicy" // update data retention policy + AuditEventRemoveChannelsFromPolicy = "removeChannelsFromPolicy" // remove channels from data retention policy + AuditEventRemoveTeamsFromPolicy = "removeTeamsFromPolicy" // remove teams from data retention policy +) + +// Emojis +const ( + AuditEventCreateEmoji = "createEmoji" // create emoji + AuditEventDeleteEmoji = "deleteEmoji" // delete emoji +) + +// Exports +const ( + AuditEventBulkExport = "bulkExport" // bulk export data to a file + AuditEventDeleteExport = "deleteExport" // delete exported file + AuditEventGeneratePresignURLExport = "generatePresignURLExport" // generate presigned URL to download the exported file + AuditEventScheduleExport = "scheduleExport" // schedule export job +) + +// Files +const ( + AuditEventGetFile = "getFile" // get or download file + AuditEventGetFileLink = "getFileLink" // generate link for file sharing + AuditEventUploadFileMultipart = "uploadFileMultipart" // upload file using multipart form data + AuditEventUploadFileMultipartLegacy = "uploadFileMultipartLegacy" // upload file using legacy multipart method + AuditEventUploadFileSimple = "uploadFileSimple" // upload file using simple direct upload method +) + +// Groups +const ( + AuditEventAddGroupMembers = "addGroupMembers" // add members to group + AuditEventAddUserToGroupSyncables = "addUserToGroupSyncables" // add user to group-synchronized teams and channels + AuditEventCreateGroup = "createGroup" // create group + AuditEventDeleteGroup = "deleteGroup" // delete group + AuditEventDeleteGroupMembers = "deleteGroupMembers" // remove members from group + AuditEventLinkGroupSyncable = "linkGroupSyncable" // link group to team or channel for synchronization + 
AuditEventPatchGroup = "patchGroup" // update group + AuditEventPatchGroupSyncable = "patchGroupSyncable" // update group synchronization settings + AuditEventRestoreGroup = "restoreGroup" // restore previously deleted group + AuditEventUnlinkGroupSyncable = "unlinkGroupSyncable" // unlink group from team or channel synchronization +) + +// Imports +const ( + AuditEventBulkImport = "bulkImport" // bulk import data from a file + AuditEventDeleteImport = "deleteImport" // delete import file + AuditEventSlackImport = "slackImport" // import data from Slack +) + +// Jobs +const ( + AuditEventCancelJob = "cancelJob" // cancel a job + AuditEventCreateJob = "createJob" // create a job + AuditEventJobServer = "jobServer" // start job server + AuditEventUpdateJobStatus = "updateJobStatus" // update status of a job +) + +// LDAP +const ( + AuditEventAddLdapPrivateCertificate = "addLdapPrivateCertificate" // add private certificate for LDAP + AuditEventAddLdapPublicCertificate = "addLdapPublicCertificate" // add public certificate for LDAP + AuditEventIdMigrateLdap = "idMigrateLdap" // migrate user ID mapping to another attribute + AuditEventLinkLdapGroup = "linkLdapGroup" // link LDAP group to Mattermost team or channel + AuditEventRemoveLdapPrivateCertificate = "removeLdapPrivateCertificate" // remove private certificate for LDAP + AuditEventRemoveLdapPublicCertificate = "removeLdapPublicCertificate" // remove public certificate for LDAP + AuditEventSyncLdap = "syncLdap" // synchronize users and groups from LDAP + AuditEventUnlinkLdapGroup = "unlinkLdapGroup" // unlink LDAP group from Mattermost team or channel +) + +// Licensing +const ( + AuditEventAddLicense = "addLicense" // add license + AuditEventLocalAddLicense = "localAddLicense" // add license locally + AuditEventLocalRemoveLicense = "localRemoveLicense" // remove license locally + AuditEventRemoveLicense = "removeLicense" // remove license + AuditEventRequestTrialLicense = "requestTrialLicense" // request trial 
license +) + +// OAuth +const ( + AuditEventAuthorizeOAuthApp = "authorizeOAuthApp" // authorize OAuth app + AuditEventAuthorizeOAuthPage = "authorizeOAuthPage" // authorize OAuth page + AuditEventCompleteOAuth = "completeOAuth" // complete OAuth authorization flow + AuditEventCreateOAuthApp = "createOAuthApp" // create OAuth app + AuditEventCreateOutgoingOauthConnection = "createOutgoingOauthConnection" // create outgoing OAuth connection + AuditEventDeauthorizeOAuthApp = "deauthorizeOAuthApp" // revoke OAuth app authorization + AuditEventDeleteOAuthApp = "deleteOAuthApp" // delete OAuth app + AuditEventDeleteOutgoingOAuthConnection = "deleteOutgoingOAuthConnection" // delete outgoing OAuth connection + AuditEventGetAccessToken = "getAccessToken" // get OAuth access token + AuditEventLoginWithOAuth = "loginWithOAuth" // login using OAuth authentication provider + AuditEventMobileLoginWithOAuth = "mobileLoginWithOAuth" // mobile application login using OAuth authentication provider + AuditEventRegenerateOAuthAppSecret = "regenerateOAuthAppSecret" // regenerate secret key for OAuth app + AuditEventRegisterOAuthClient = "registerOAuthClient" // register OAuth client via dynamic client registration (RFC 7591) + AuditEventSignupWithOAuth = "signupWithOAuth" // create account using OAuth authentication provider + AuditEventUpdateOAuthApp = "updateOAuthApp" // update OAuth app + AuditEventUpdateOutgoingOAuthConnection = "updateOutgoingOAuthConnection" // update outgoing OAuth connection + AuditEventValidateOutgoingOAuthConnectionCredentials = "validateOutgoingOAuthConnectionCredentials" // validate credentials for outgoing OAuth connection + +) + +// Plugins +const ( + AuditEventDisablePlugin = "disablePlugin" // disable installed plugin + AuditEventEnablePlugin = "enablePlugin" // enable installed plugin + AuditEventGetFirstAdminVisitMarketplaceStatus = "getFirstAdminVisitMarketplaceStatus" // get first admin visit status + AuditEventInstallMarketplacePlugin = 
"installMarketplacePlugin" // install plugin from official marketplace + AuditEventInstallPluginFromURL = "installPluginFromURL" // install plugin from external URL + AuditEventRemovePlugin = "removePlugin" // delete plugin + AuditEventSetFirstAdminVisitMarketplaceStatus = "setFirstAdminVisitMarketplaceStatus" // set first admin visit status + AuditEventUploadPlugin = "uploadPlugin" // upload plugin file to server for installation +) + +// Posts +const ( + AuditEventCreatePost = "createPost" // create post + AuditEventDeletePost = "deletePost" // delete post + AuditEventLocalDeletePost = "localDeletePost" // delete post locally + AuditEventMoveThread = "moveThread" // move thread and replies to different channel + AuditEventPatchPost = "patchPost" // update post meta properties + AuditEventRestorePostVersion = "restorePostVersion" // restore post to previous version + AuditEventSaveIsPinnedPost = "saveIsPinnedPost" // pin or unpin post + AuditEventSearchPosts = "searchPosts" // search for posts + AuditEventUpdatePost = "updatePost" // update post content +) + +// Preferences +const ( + AuditEventDeletePreferences = "deletePreferences" // delete user preferences + AuditEventUpdatePreferences = "updatePreferences" // update user preferences +) + +// Remote Clusters +const ( + AuditEventCreateRemoteCluster = "createRemoteCluster" // create connection to remote Mattermost cluster + AuditEventDeleteRemoteCluster = "deleteRemoteCluster" // delete connection to remote Mattermost cluster + AuditEventGenerateRemoteClusterInvite = "generateRemoteClusterInvite" // generate invitation token for remote cluster connection + AuditEventInviteRemoteClusterToChannel = "inviteRemoteClusterToChannel" // invite remote cluster users to shared channel + AuditEventPatchRemoteCluster = "patchRemoteCluster" // update remote cluster connection settings + AuditEventRemoteClusterAcceptInvite = "remoteClusterAcceptInvite" // accept invitation from remote cluster + 
AuditEventRemoteClusterAcceptMessage = "remoteClusterAcceptMessage" // accept message from remote cluster + AuditEventRemoteUploadProfileImage = "remoteUploadProfileImage" // upload profile image from remote cluster + AuditEventUninviteRemoteClusterToChannel = "uninviteRemoteClusterToChannel" // remove remote cluster access from shared channel + AuditEventUploadRemoteData = "uploadRemoteData" // upload data to remote cluster +) + +// Roles +const ( + AuditEventPatchRole = "patchRole" // update role permissions +) + +// SAML +const ( + AuditEventAddSamlIdpCertificate = "addSamlIdpCertificate" // add SAML identity provider certificate + AuditEventAddSamlPrivateCertificate = "addSamlPrivateCertificate" // add SAML private certificate + AuditEventAddSamlPublicCertificate = "addSamlPublicCertificate" // add SAML public certificate + AuditEventCompleteSaml = "completeSaml" // complete SAML authentication flow + AuditEventRemoveSamlIdpCertificate = "removeSamlIdpCertificate" // remove SAML identity provider certificate + AuditEventRemoveSamlPrivateCertificate = "removeSamlPrivateCertificate" // remove SAML private certificate + AuditEventRemoveSamlPublicCertificate = "removeSamlPublicCertificate" // remove SAML public certificate +) + +// Scheduled Posts +const ( + AuditEventCreateSchedulePost = "createSchedulePost" // create post scheduled for future delivery + AuditEventDeleteScheduledPost = "deleteScheduledPost" // delete scheduled post before delivery + AuditEventUpdateScheduledPost = "updateScheduledPost" // update scheduled post +) + +// Schemes +const ( + AuditEventCreateScheme = "createScheme" // create permission scheme with role definitions + AuditEventDeleteScheme = "deleteScheme" // delete scheme + AuditEventPatchScheme = "patchScheme" // update scheme +) + +// Search Indexes +const ( + AuditEventPurgeBleveIndexes = "purgeBleveIndexes" // purge Bleve search indexes + AuditEventPurgeElasticsearchIndexes = "purgeElasticsearchIndexes" // purge Elasticsearch 
search indexes +) + +// Server Administration +const ( + AuditEventClearServerBusy = "clearServerBusy" // clear server busy status to allow normal operations + AuditEventCompleteOnboarding = "completeOnboarding" // complete system onboarding process + AuditEventDatabaseRecycle = "databaseRecycle" // closes active connections + AuditEventDownloadLogs = "downloadLogs" // download server log files + AuditEventGetAppliedSchemaMigrations = "getAppliedSchemaMigrations" // get list of applied database schema migrations + AuditEventGetLogs = "getLogs" // get server log entries + AuditEventGetOnboarding = "getOnboarding" // get system onboarding status + AuditEventInvalidateCaches = "invalidateCaches" // clear server caches + AuditEventLocalCheckIntegrity = "localCheckIntegrity" // check database integrity locally + AuditEventQueryLogs = "queryLogs" // search server log entries + AuditEventRestartServer = "restartServer" // restart Mattermost server process + AuditEventSetServerBusy = "setServerBusy" // set server busy status to disallow any operations + AuditEventUpdateViewedProductNotices = "updateViewedProductNotices" // update viewed status of product notices + AuditEventUpgradeToEnterprise = "upgradeToEnterprise" // upgrade server to Enterprise edition +) + +// Teams +const ( + AuditEventAddTeamMember = "addTeamMember" // add member to team + AuditEventAddTeamMembers = "addTeamMembers" // add multiple members to team + AuditEventAddUserToTeamFromInvite = "addUserToTeamFromInvite" // add user to team using invitation link + AuditEventCreateTeam = "createTeam" // create team + AuditEventDeleteTeam = "deleteTeam" // delete team + AuditEventImportTeam = "importTeam" // import team data from external source + AuditEventInvalidateAllEmailInvites = "invalidateAllEmailInvites" // invalidate all pending email invitations + AuditEventInviteGuestsToChannels = "inviteGuestsToChannels" // invite guest users to specific channels + AuditEventInviteUsersToTeam = "inviteUsersToTeam" // 
invite users to team + AuditEventLocalCreateTeam = "localCreateTeam" // create team locally + AuditEventLocalDeleteTeam = "localDeleteTeam" // delete team locally + AuditEventLocalInviteUsersToTeam = "localInviteUsersToTeam" // invite users to team locally + AuditEventPatchTeam = "patchTeam" // update team properties + AuditEventRegenerateTeamInviteId = "regenerateTeamInviteId" // regenerate team invitation ID + AuditEventRemoveTeamIcon = "removeTeamIcon" // remove custom icon from team + AuditEventRemoveTeamMember = "removeTeamMember" // remove member from team + AuditEventRestoreTeam = "restoreTeam" // restore previously deleted team + AuditEventSetTeamIcon = "setTeamIcon" // set custom icon for team + AuditEventUpdateTeam = "updateTeam" // update team properties + AuditEventUpdateTeamMemberRoles = "updateTeamMemberRoles" // update roles of team members + AuditEventUpdateTeamMemberSchemeRoles = "updateTeamMemberSchemeRoles" // update scheme-based roles of team members + AuditEventUpdateTeamPrivacy = "updateTeamPrivacy" // change team privacy settings + AuditEventUpdateTeamScheme = "updateTeamScheme" // update scheme applied to team +) + +// Terms of Service +const ( + AuditEventCreateTermsOfService = "createTermsOfService" // create terms of service + AuditEventSaveUserTermsOfService = "saveUserTermsOfService" // save user acceptance of terms of service +) + +// Threads +const ( + AuditEventFollowThreadByUser = "followThreadByUser" // follow thread to receive notifications about replies + AuditEventSetUnreadThreadByPostId = "setUnreadThreadByPostId" // mark thread as unread for user by post ID + AuditEventUnfollowThreadByUser = "unfollowThreadByUser" // unfollow thread to stop receiving notifications about replies + AuditEventUpdateReadStateAllThreadsByUser = "updateReadStateAllThreadsByUser" // update read status for all threads for user + AuditEventUpdateReadStateThreadByUser = "updateReadStateThreadByUser" // update read status for specific thread for user +) 
+ +// Uploads +const ( + AuditEventCreateUpload = "createUpload" // create file upload session + AuditEventUploadData = "uploadData" // upload file data to server storage +) + +// Users +const ( + AuditEventAttachDeviceId = "attachDeviceId" // attach device ID to user session for mobile app + AuditEventCreateUser = "createUser" // create user account + AuditEventCreateUserAccessToken = "createUserAccessToken" // create personal access token for user API access + AuditEventDeleteUser = "deleteUser" // delete user account + AuditEventDemoteUserToGuest = "demoteUserToGuest" // demote regular user to guest account with limited permissions + AuditEventDisableUserAccessToken = "disableUserAccessToken" // disable user personal access token + AuditEventEnableUserAccessToken = "enableUserAccessToken" // enable user personal access token + AuditEventExtendSessionExpiry = "extendSessionExpiry" // extend user session expiration time + AuditEventLocalDeleteUser = "localDeleteUser" // delete user locally + AuditEventLocalPermanentDeleteAllUsers = "localPermanentDeleteAllUsers" // permanently delete all users locally + AuditEventLogin = "login" // user login to system + AuditEventLogout = "logout" // user logout from system + AuditEventMigrateAuthToLdap = "migrateAuthToLdap" // migrate user authentication method to LDAP + AuditEventMigrateAuthToSaml = "migrateAuthToSaml" // migrate user authentication method to SAML + AuditEventPatchUser = "patchUser" // update user properties + AuditEventPromoteGuestToUser = "promoteGuestToUser" // promote guest account to regular user + AuditEventResetPassword = "resetPassword" // reset user password + AuditEventResetPasswordFailedAttempts = "resetPasswordFailedAttempts" // reset failed password attempt counter + AuditEventRevokeAllSessionsAllUsers = "revokeAllSessionsAllUsers" // revoke all active sessions for all users + AuditEventRevokeAllSessionsForUser = "revokeAllSessionsForUser" // revoke all active sessions for specific user + 
AuditEventRevokeSession = "revokeSession" // revoke specific user session + AuditEventRevokeUserAccessToken = "revokeUserAccessToken" // revoke user personal access token + AuditEventSendPasswordReset = "sendPasswordReset" // send password reset email to user + AuditEventSendVerificationEmail = "sendVerificationEmail" // send email verification link to user + AuditEventSetDefaultProfileImage = "setDefaultProfileImage" // set user profile image to default avatar + AuditEventSetProfileImage = "setProfileImage" // set custom profile image for user + AuditEventSwitchAccountType = "switchAccountType" // switch user authentication method from one to another + AuditEventUpdatePassword = "updatePassword" // update user password + AuditEventUpdateUser = "updateUser" // update user account properties + AuditEventUpdateUserActive = "updateUserActive" // update user active status + AuditEventUpdateUserAuth = "updateUserAuth" // update user authentication method + AuditEventUpdateUserMfa = "updateUserMfa" // update user multi-factor authentication settings + AuditEventUpdateUserRoles = "updateUserRoles" // update user roles + AuditEventVerifyUserEmail = "verifyUserEmail" // verify user email address using verification token + AuditEventVerifyUserEmailWithoutToken = "verifyUserEmailWithoutToken" // verify user email address without verification token +) + +// Webhooks +const ( + AuditEventCreateIncomingHook = "createIncomingHook" // create incoming webhook + AuditEventCreateOutgoingHook = "createOutgoingHook" // create outgoing webhook + AuditEventDeleteIncomingHook = "deleteIncomingHook" // delete incoming webhook + AuditEventDeleteOutgoingHook = "deleteOutgoingHook" // delete outgoing webhook + AuditEventGetIncomingHook = "getIncomingHook" // get incoming webhook details + AuditEventGetOutgoingHook = "getOutgoingHook" // get outgoing webhook details + AuditEventLocalCreateIncomingHook = "localCreateIncomingHook" // create incoming webhook locally + 
AuditEventRegenOutgoingHookToken = "regenOutgoingHookToken" // regenerate authentication token + AuditEventUpdateIncomingHook = "updateIncomingHook" // update incoming webhook + AuditEventUpdateOutgoingHook = "updateOutgoingHook" // update outgoing webhook +) + +// Content Flagging +const ( + AuditEventFlagPost = "flagPost" // flag post for review + AuditEventGetFlaggedPost = "getFlaggedPost" // get flagged post details + AuditEventPermanentlyRemoveFlaggedPost = "permanentlyRemoveFlaggedPost" // permanently remove flagged post + AuditEventKeepFlaggedPost = "keepFlaggedPost" // keep flagged post + AuditEventUpdateContentFlaggingConfig = "updateContentFlaggingConfig" // update content flagging configuration + AuditEventSetReviewer = "setFlaggedPostReviewer" // assign reviewer for flagged post +) diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/audit_record.go b/vendor/github.com/mattermost/mattermost/server/public/model/audit_record.go new file mode 100644 index 00000000..3f21521d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/audit_record.go @@ -0,0 +1,149 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +const ( + AuditKeyActor = "actor" + AuditKeyAPIPath = "api_path" + AuditKeyEvent = "event" + AuditKeyEventData = "event_data" + AuditKeyEventName = "event_name" + AuditKeyMeta = "meta" + AuditKeyError = "error" + AuditKeyStatus = "status" + AuditKeyUserID = "user_id" + AuditKeySessionID = "session_id" + AuditKeyClient = "client" + AuditKeyIPAddress = "ip_address" + AuditKeyClusterID = "cluster_id" + + AuditStatusSuccess = "success" + AuditStatusAttempt = "attempt" + AuditStatusFail = "fail" +) + +// AuditRecord provides a consistent set of fields used for all audit logging. 
+type AuditRecord struct { + EventName string `json:"event_name"` + Status string `json:"status"` + EventData AuditEventData `json:"event"` + Actor AuditEventActor `json:"actor"` + Meta map[string]any `json:"meta"` + Error AuditEventError `json:"error"` +} + +// AuditEventData contains all event specific data about the modified entity +type AuditEventData struct { + Parameters map[string]any `json:"parameters"` // Payload and parameters being processed as part of the request + PriorState map[string]any `json:"prior_state"` // Prior state of the object being modified, nil if no prior state + ResultState map[string]any `json:"resulting_state"` // Resulting object after creating or modifying it + ObjectType string `json:"object_type"` // String representation of the object type. eg. "post" +} + +// AuditEventActor is the subject triggering the event +type AuditEventActor struct { + UserId string `json:"user_id"` + SessionId string `json:"session_id"` + Client string `json:"client"` + IpAddress string `json:"ip_address"` + XForwardedFor string `json:"x_forwarded_for"` +} + +// EventMeta is a key-value store to store related information to the event that is not directly related to the modified entity +type EventMeta struct { + ApiPath string `json:"api_path"` + ClusterId string `json:"cluster_id"` +} + +// AuditEventError contains error information in case of failure of the event +type AuditEventError struct { + Description string `json:"description,omitempty"` + Code int `json:"status_code,omitempty"` +} + +// Auditable for sensitive object classes, consider implementing Auditable and include whatever the +// AuditableObject returns. For example: it's likely OK to write a user object to the +// audit logs, but not the user password in cleartext or hashed form +type Auditable interface { + Auditable() map[string]any +} + +// Success marks the audit record status as successful. 
+func (rec *AuditRecord) Success() { + rec.Status = AuditStatusSuccess +} + +// Fail marks the audit record status as failed. +func (rec *AuditRecord) Fail() { + rec.Status = AuditStatusFail +} + +// AddEventParameterToAuditRec adds a parameter, e.g. query or post body, to the event +func AddEventParameterToAuditRec[T string | bool | int | int64 | []string | map[string]string](rec *AuditRecord, key string, val T) { + if rec.EventData.Parameters == nil { + rec.EventData.Parameters = make(map[string]any) + } + + rec.EventData.Parameters[key] = val +} + +// AddEventParameterAuditableToAuditRec adds an object that is of type Auditable to the event +func AddEventParameterAuditableToAuditRec(rec *AuditRecord, key string, val Auditable) { + if rec.EventData.Parameters == nil { + rec.EventData.Parameters = make(map[string]any) + } + + rec.EventData.Parameters[key] = val.Auditable() +} + +// AddEventParameterAuditableArrayToAuditRec adds an array of objects of type Auditable to the event +func AddEventParameterAuditableArrayToAuditRec[T Auditable](rec *AuditRecord, key string, val []T) { + if rec.EventData.Parameters == nil { + rec.EventData.Parameters = make(map[string]any) + } + + processedAuditables := make([]map[string]any, 0, len(val)) + for _, auditableVal := range val { + processedAuditables = append(processedAuditables, auditableVal.Auditable()) + } + + rec.EventData.Parameters[key] = processedAuditables +} + +// AddEventPriorState adds the prior state of the modified object to the audit record +func (rec *AuditRecord) AddEventPriorState(object Auditable) { + rec.EventData.PriorState = object.Auditable() +} + +// AddEventResultState adds the result state of the modified object to the audit record +func (rec *AuditRecord) AddEventResultState(object Auditable) { + rec.EventData.ResultState = object.Auditable() +} + +// AddEventObjectType adds the object type of the modified object to the audit record +func (rec *AuditRecord) AddEventObjectType(objectType string) { + 
rec.EventData.ObjectType = objectType +} + +// AddMeta adds a key/value entry to the audit record that can be used for related information not directly related to +// the modified object, e.g. authentication method +func (rec *AuditRecord) AddMeta(name string, val any) { + rec.Meta[name] = val +} + +// AddErrorCode adds the error code for a failed event to the audit record +func (rec *AuditRecord) AddErrorCode(code int) { + rec.Error.Code = code +} + +// AddErrorDesc adds the error description for a failed event to the audit record +func (rec *AuditRecord) AddErrorDesc(description string) { + rec.Error.Description = description +} + +// AddAppError adds an AppError to the audit record +func (rec *AuditRecord) AddAppError(err *AppError) { + rec.AddErrorCode(err.StatusCode) + rec.AddErrorDesc(err.Error()) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/auditconv.go b/vendor/github.com/mattermost/mattermost/server/public/model/auditconv.go new file mode 100644 index 00000000..be7ac04a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/auditconv.go @@ -0,0 +1,776 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "strings" + + "github.com/francoispqt/gojay" +) + +// AuditModelTypeConv converts key model types to something better suited for audit output. 
+func AuditModelTypeConv(val any) (newVal any, converted bool) { + if val == nil { + return nil, false + } + switch v := val.(type) { + case *Channel: + return newAuditChannel(v), true + case Channel: + return newAuditChannel(&v), true + case *Team: + return newAuditTeam(v), true + case Team: + return newAuditTeam(&v), true + case *User: + return newAuditUser(v), true + case User: + return newAuditUser(&v), true + case *UserPatch: + return newAuditUserPatch(v), true + case UserPatch: + return newAuditUserPatch(&v), true + case *Command: + return newAuditCommand(v), true + case Command: + return newAuditCommand(&v), true + case *CommandArgs: + return newAuditCommandArgs(v), true + case CommandArgs: + return newAuditCommandArgs(&v), true + case *Bot: + return newAuditBot(v), true + case Bot: + return newAuditBot(&v), true + case *ChannelModerationPatch: + return newAuditChannelModerationPatch(v), true + case ChannelModerationPatch: + return newAuditChannelModerationPatch(&v), true + case *Emoji: + return newAuditEmoji(v), true + case Emoji: + return newAuditEmoji(&v), true + case *FileInfo: + return newAuditFileInfo(v), true + case FileInfo: + return newAuditFileInfo(&v), true + case *Group: + return newAuditGroup(v), true + case Group: + return newAuditGroup(&v), true + case *Job: + return newAuditJob(v), true + case Job: + return newAuditJob(&v), true + case *OAuthApp: + return newAuditOAuthApp(v), true + case OAuthApp: + return newAuditOAuthApp(&v), true + case *Post: + return newAuditPost(v), true + case Post: + return newAuditPost(&v), true + case *Role: + return newAuditRole(v), true + case Role: + return newAuditRole(&v), true + case *Scheme: + return newAuditScheme(v), true + case Scheme: + return newAuditScheme(&v), true + case *SchemeRoles: + return newAuditSchemeRoles(v), true + case SchemeRoles: + return newAuditSchemeRoles(&v), true + case *Session: + return newAuditSession(v), true + case Session: + return newAuditSession(&v), true + case 
*IncomingWebhook: + return newAuditIncomingWebhook(v), true + case IncomingWebhook: + return newAuditIncomingWebhook(&v), true + case *OutgoingWebhook: + return newAuditOutgoingWebhook(v), true + case OutgoingWebhook: + return newAuditOutgoingWebhook(&v), true + case *RemoteCluster: + return newRemoteCluster(v), true + case RemoteCluster: + return newRemoteCluster(&v), true + } + return val, false +} + +type auditChannel struct { + ID string + Name string + Type ChannelType +} + +// newAuditChannel creates a simplified representation of Channel for output to audit log. +func newAuditChannel(c *Channel) auditChannel { + var channel auditChannel + if c != nil { + channel.ID = c.Id + channel.Name = c.Name + channel.Type = c.Type + } + return channel +} + +func (c auditChannel) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", c.ID) + enc.StringKey("name", c.Name) + enc.StringKey("type", string(c.Type)) +} + +func (c auditChannel) IsNil() bool { + return false +} + +type auditTeam struct { + ID string + Name string + Type string +} + +// newAuditTeam creates a simplified representation of Team for output to audit log. +func newAuditTeam(t *Team) auditTeam { + var team auditTeam + if t != nil { + team.ID = t.Id + team.Name = t.Name + team.Type = t.Type + } + return team +} + +func (t auditTeam) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", t.ID) + enc.StringKey("name", t.Name) + enc.StringKey("type", t.Type) +} + +func (t auditTeam) IsNil() bool { + return false +} + +type auditUser struct { + ID string + Name string + Roles string +} + +// newAuditUser creates a simplified representation of User for output to audit log. +func newAuditUser(u *User) auditUser { + var user auditUser + if u != nil { + user.ID = u.Id + user.Name = u.Username + user.Roles = u.Roles + } + return user +} + +type auditUserPatch struct { + Name string +} + +// newAuditUserPatch creates a simplified representation of UserPatch for output to audit log. 
+func newAuditUserPatch(up *UserPatch) auditUserPatch { + var userPatch auditUserPatch + if up != nil { + if up.Username != nil { + userPatch.Name = *up.Username + } + } + return userPatch +} + +func (u auditUser) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", u.ID) + enc.StringKey("name", u.Name) + enc.StringKey("roles", u.Roles) +} + +func (u auditUser) IsNil() bool { + return false +} + +type auditCommand struct { + ID string + CreatorID string + TeamID string + Trigger string + Method string + Username string + IconURL string + AutoComplete bool + AutoCompleteDesc string + AutoCompleteHint string + DisplayName string + Description string + URL string +} + +// newAuditCommand creates a simplified representation of Command for output to audit log. +func newAuditCommand(c *Command) auditCommand { + var cmd auditCommand + if c != nil { + cmd.ID = c.Id + cmd.CreatorID = c.CreatorId + cmd.TeamID = c.TeamId + cmd.Trigger = c.Trigger + cmd.Method = c.Method + cmd.Username = c.Username + cmd.IconURL = c.IconURL + cmd.AutoComplete = c.AutoComplete + cmd.AutoCompleteDesc = c.AutoCompleteDesc + cmd.AutoCompleteHint = c.AutoCompleteHint + cmd.DisplayName = c.DisplayName + cmd.Description = c.Description + cmd.URL = c.URL + } + return cmd +} + +func (cmd auditCommand) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", cmd.ID) + enc.StringKey("creator_id", cmd.CreatorID) + enc.StringKey("team_id", cmd.TeamID) + enc.StringKey("trigger", cmd.Trigger) + enc.StringKey("method", cmd.Method) + enc.StringKey("username", cmd.Username) + enc.StringKey("icon_url", cmd.IconURL) + enc.BoolKey("auto_complete", cmd.AutoComplete) + enc.StringKey("auto_complete_desc", cmd.AutoCompleteDesc) + enc.StringKey("auto_complete_hint", cmd.AutoCompleteHint) + enc.StringKey("display", cmd.DisplayName) + enc.StringKey("desc", cmd.Description) + enc.StringKey("url", cmd.URL) +} + +func (cmd auditCommand) IsNil() bool { + return false +} + +type auditCommandArgs struct { + 
ChannelID string + TeamID string + TriggerID string + Command string +} + +// newAuditCommandArgs creates a simplified representation of CommandArgs for output to audit log. +func newAuditCommandArgs(ca *CommandArgs) auditCommandArgs { + var cmdargs auditCommandArgs + if ca != nil { + cmdargs.ChannelID = ca.ChannelId + cmdargs.TeamID = ca.TeamId + cmdargs.TriggerID = ca.TriggerId + cmdFields := strings.Fields(ca.Command) + if len(cmdFields) > 0 { + cmdargs.Command = cmdFields[0] + } + } + return cmdargs +} + +func (ca auditCommandArgs) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("channel_id", ca.ChannelID) + enc.StringKey("team_id", ca.TriggerID) + enc.StringKey("trigger_id", ca.TeamID) + enc.StringKey("command", ca.Command) +} + +func (ca auditCommandArgs) IsNil() bool { + return false +} + +type auditBot struct { + UserID string + Username string + Displayname string +} + +// newAuditBot creates a simplified representation of Bot for output to audit log. +func newAuditBot(b *Bot) auditBot { + var bot auditBot + if b != nil { + bot.UserID = b.UserId + bot.Username = b.Username + bot.Displayname = b.DisplayName + } + return bot +} + +func (b auditBot) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("user_id", b.UserID) + enc.StringKey("username", b.Username) + enc.StringKey("display", b.Displayname) +} + +func (b auditBot) IsNil() bool { + return false +} + +type auditChannelModerationPatch struct { + Name string + RoleGuests bool + RoleMembers bool +} + +// newAuditChannelModerationPatch creates a simplified representation of ChannelModerationPatch for output to audit log. 
+func newAuditChannelModerationPatch(p *ChannelModerationPatch) auditChannelModerationPatch { + var patch auditChannelModerationPatch + if p != nil { + if p.Name != nil { + patch.Name = *p.Name + } + if p.Roles.Guests != nil { + patch.RoleGuests = *p.Roles.Guests + } + if p.Roles.Members != nil { + patch.RoleMembers = *p.Roles.Members + } + } + return patch +} + +func (p auditChannelModerationPatch) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("name", p.Name) + enc.BoolKey("role_guests", p.RoleGuests) + enc.BoolKey("role_members", p.RoleMembers) +} + +func (p auditChannelModerationPatch) IsNil() bool { + return false +} + +type auditEmoji struct { + ID string + Name string +} + +// newAuditEmoji creates a simplified representation of Emoji for output to audit log. +func newAuditEmoji(e *Emoji) auditEmoji { + var emoji auditEmoji + if e != nil { + emoji.ID = e.Id + emoji.Name = e.Name + } + return emoji +} + +func (e auditEmoji) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", e.ID) + enc.StringKey("name", e.Name) +} + +func (e auditEmoji) IsNil() bool { + return false +} + +type auditFileInfo struct { + ID string + PostID string + Path string + Name string + Extension string + Size int64 +} + +// newAuditFileInfo creates a simplified representation of FileInfo for output to audit log. 
+func newAuditFileInfo(f *FileInfo) auditFileInfo { + var fi auditFileInfo + if f != nil { + fi.ID = f.Id + fi.PostID = f.PostId + fi.Path = f.Path + fi.Name = f.Name + fi.Extension = f.Extension + fi.Size = f.Size + } + return fi +} + +func (fi auditFileInfo) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", fi.ID) + enc.StringKey("post_id", fi.PostID) + enc.StringKey("path", fi.Path) + enc.StringKey("name", fi.Name) + enc.StringKey("ext", fi.Extension) + enc.Int64Key("size", fi.Size) +} + +func (fi auditFileInfo) IsNil() bool { + return false +} + +type auditGroup struct { + ID string + Name string + DisplayName string + Description string +} + +// newAuditGroup creates a simplified representation of Group for output to audit log. +func newAuditGroup(g *Group) auditGroup { + var group auditGroup + if g != nil { + group.ID = g.Id + if g.Name == nil { + group.Name = "" + } else { + group.Name = *g.Name + } + group.DisplayName = g.DisplayName + group.Description = g.Description + } + return group +} + +func (g auditGroup) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", g.ID) + enc.StringKey("name", g.Name) + enc.StringKey("display", g.DisplayName) + enc.StringKey("desc", g.Description) +} + +func (g auditGroup) IsNil() bool { + return false +} + +type auditJob struct { + ID string + Type string + Priority int64 + StartAt int64 +} + +// newAuditJob creates a simplified representation of Job for output to audit log. 
+func newAuditJob(j *Job) auditJob { + var job auditJob + if j != nil { + job.ID = j.Id + job.Type = j.Type + job.Priority = j.Priority + job.StartAt = j.StartAt + } + return job +} + +func (j auditJob) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", j.ID) + enc.StringKey("type", j.Type) + enc.Int64Key("priority", j.Priority) + enc.Int64Key("start_at", j.StartAt) +} + +func (j auditJob) IsNil() bool { + return false +} + +type auditOAuthApp struct { + ID string + CreatorID string + Name string + Description string + IsTrusted bool +} + +// newAuditOAuthApp creates a simplified representation of OAuthApp for output to audit log. +func newAuditOAuthApp(o *OAuthApp) auditOAuthApp { + var oauth auditOAuthApp + if o != nil { + oauth.ID = o.Id + oauth.CreatorID = o.CreatorId + oauth.Name = o.Name + oauth.Description = o.Description + oauth.IsTrusted = o.IsTrusted + } + return oauth +} + +func (o auditOAuthApp) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", o.ID) + enc.StringKey("creator_id", o.CreatorID) + enc.StringKey("name", o.Name) + enc.StringKey("desc", o.Description) + enc.BoolKey("trusted", o.IsTrusted) +} + +func (o auditOAuthApp) IsNil() bool { + return false +} + +type auditPost struct { + ID string + ChannelID string + Type string + IsPinned bool +} + +// newAuditPost creates a simplified representation of Post for output to audit log. 
+func newAuditPost(p *Post) auditPost { + var post auditPost + if p != nil { + post.ID = p.Id + post.ChannelID = p.ChannelId + post.Type = p.Type + post.IsPinned = p.IsPinned + } + return post +} + +func (p auditPost) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", p.ID) + enc.StringKey("channel_id", p.ChannelID) + enc.StringKey("type", p.Type) + enc.BoolKey("pinned", p.IsPinned) +} + +func (p auditPost) IsNil() bool { + return false +} + +type auditRole struct { + ID string + Name string + DisplayName string + Permissions []string + SchemeManaged bool + BuiltIn bool +} + +// newAuditRole creates a simplified representation of Role for output to audit log. +func newAuditRole(r *Role) auditRole { + var role auditRole + if r != nil { + role.ID = r.Id + role.Name = r.Name + role.DisplayName = r.DisplayName + role.Permissions = append(role.Permissions, r.Permissions...) + role.SchemeManaged = r.SchemeManaged + role.BuiltIn = r.BuiltIn + } + return role +} + +func (r auditRole) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", r.ID) + enc.StringKey("name", r.Name) + enc.StringKey("display", r.DisplayName) + enc.SliceStringKey("perms", r.Permissions) + enc.BoolKey("schemeManaged", r.SchemeManaged) + enc.BoolKey("builtin", r.BuiltIn) +} + +func (r auditRole) IsNil() bool { + return false +} + +type auditScheme struct { + ID string + Name string + DisplayName string + Scope string +} + +// newAuditScheme creates a simplified representation of Scheme for output to audit log. 
+func newAuditScheme(s *Scheme) auditScheme { + var scheme auditScheme + if s != nil { + scheme.ID = s.Id + scheme.Name = s.Name + scheme.DisplayName = s.DisplayName + scheme.Scope = s.Scope + } + return scheme +} + +func (s auditScheme) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", s.ID) + enc.StringKey("name", s.Name) + enc.StringKey("display", s.DisplayName) + enc.StringKey("scope", s.Scope) +} + +func (s auditScheme) IsNil() bool { + return false +} + +type auditSchemeRoles struct { + SchemeAdmin bool + SchemeUser bool + SchemeGuest bool +} + +// newAuditSchemeRoles creates a simplified representation of SchemeRoles for output to audit log. +func newAuditSchemeRoles(s *SchemeRoles) auditSchemeRoles { + var roles auditSchemeRoles + if s != nil { + roles.SchemeAdmin = s.SchemeAdmin + roles.SchemeUser = s.SchemeUser + roles.SchemeGuest = s.SchemeGuest + } + return roles +} + +func (s auditSchemeRoles) MarshalJSONObject(enc *gojay.Encoder) { + enc.BoolKey("admin", s.SchemeAdmin) + enc.BoolKey("user", s.SchemeUser) + enc.BoolKey("guest", s.SchemeGuest) +} + +func (s auditSchemeRoles) IsNil() bool { + return false +} + +type auditSession struct { + ID string + UserId string + DeviceId string +} + +// newAuditSession creates a simplified representation of Session for output to audit log. +func newAuditSession(s *Session) auditSession { + var session auditSession + if s != nil { + session.ID = s.Id + session.UserId = s.UserId + session.DeviceId = s.DeviceId + } + return session +} + +func (s auditSession) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", s.ID) + enc.StringKey("user_id", s.UserId) + enc.StringKey("device_id", s.DeviceId) +} + +func (s auditSession) IsNil() bool { + return false +} + +type auditIncomingWebhook struct { + ID string + ChannelID string + TeamId string + DisplayName string + Description string +} + +// newAuditIncomingWebhook creates a simplified representation of IncomingWebhook for output to audit log. 
+func newAuditIncomingWebhook(h *IncomingWebhook) auditIncomingWebhook { + var hook auditIncomingWebhook + if h != nil { + hook.ID = h.Id + hook.ChannelID = h.ChannelId + hook.TeamId = h.TeamId + hook.DisplayName = h.DisplayName + hook.Description = h.Description + } + return hook +} + +func (h auditIncomingWebhook) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", h.ID) + enc.StringKey("channel_id", h.ChannelID) + enc.StringKey("team_id", h.TeamId) + enc.StringKey("display", h.DisplayName) + enc.StringKey("desc", h.Description) +} + +func (h auditIncomingWebhook) IsNil() bool { + return false +} + +type auditOutgoingWebhook struct { + ID string + ChannelID string + TeamID string + TriggerWords StringArray + TriggerWhen int + DisplayName string + Description string + ContentType string + Username string +} + +// newAuditOutgoingWebhook creates a simplified representation of OutgoingWebhook for output to audit log. +func newAuditOutgoingWebhook(h *OutgoingWebhook) auditOutgoingWebhook { + var hook auditOutgoingWebhook + if h != nil { + hook.ID = h.Id + hook.ChannelID = h.ChannelId + hook.TeamID = h.TeamId + hook.TriggerWords = h.TriggerWords + hook.TriggerWhen = h.TriggerWhen + hook.DisplayName = h.DisplayName + hook.Description = h.Description + hook.ContentType = h.ContentType + hook.Username = h.Username + } + return hook +} + +func (h auditOutgoingWebhook) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("id", h.ID) + enc.StringKey("channel_id", h.ChannelID) + enc.StringKey("team_id", h.TeamID) + enc.SliceStringKey("trigger_words", h.TriggerWords) + enc.IntKey("trigger_when", h.TriggerWhen) + enc.StringKey("display", h.DisplayName) + enc.StringKey("desc", h.Description) + enc.StringKey("content_type", h.ContentType) + enc.StringKey("username", h.Username) +} + +func (h auditOutgoingWebhook) IsNil() bool { + return false +} + +type auditRemoteCluster struct { + RemoteId string + RemoteTeamId string + Name string + DisplayName string + SiteURL 
string + CreateAt int64 + LastPingAt int64 + CreatorId string +} + +// newRemoteCluster creates a simplified representation of RemoteCluster for output to audit log. +func newRemoteCluster(r *RemoteCluster) auditRemoteCluster { + var rc auditRemoteCluster + if r != nil { + rc.RemoteId = r.RemoteId + rc.Name = r.Name + rc.DisplayName = r.DisplayName + rc.SiteURL = r.SiteURL + rc.CreateAt = r.CreateAt + rc.LastPingAt = r.LastPingAt + rc.CreatorId = r.CreatorId + } + return rc +} + +func (r auditRemoteCluster) MarshalJSONObject(enc *gojay.Encoder) { + enc.StringKey("remote_id", r.RemoteId) + enc.StringKey("remote_team_id", r.RemoteTeamId) + enc.StringKey("name", r.Name) + enc.StringKey("display_name", r.DisplayName) + enc.StringKey("site_url", r.SiteURL) + enc.Int64Key("create_at", r.CreateAt) + enc.Int64Key("last_ping_at", r.LastPingAt) + enc.StringKey("creator_id", r.CreatorId) +} + +func (r auditRemoteCluster) IsNil() bool { + return false +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/audits.go b/vendor/github.com/mattermost/mattermost/server/public/model/audits.go new file mode 100644 index 00000000..1c547c89 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/audits.go @@ -0,0 +1,14 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type Audits []Audit + +func (o Audits) Etag() string { + if len(o) > 0 { + // the first in the list is always the most current + return Etag(o[0].CreateAt) + } + return "" +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/authorize.go b/vendor/github.com/mattermost/mattermost/server/public/model/authorize.go new file mode 100644 index 00000000..74f8ef55 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/authorize.go @@ -0,0 +1,300 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. 
+// See LICENSE.txt for license information. + +package model + +import ( + "crypto/sha256" + "encoding/base64" + "net/http" + "net/url" + "regexp" +) + +var ( + codeChallengeRegex = regexp.MustCompile("^[A-Za-z0-9_-]+$") + codeVerifierRegex = regexp.MustCompile(`^[A-Za-z0-9\-._~]+$`) +) + +const ( + AuthCodeExpireTime = 60 * 10 // 10 minutes + AuthCodeResponseType = "code" + ImplicitResponseType = "token" + DefaultScope = "user" + PKCECodeChallengeMethodS256 = "S256" + PKCECodeChallengeMinLength = 43 + PKCECodeChallengeMaxLength = 128 + PKCECodeVerifierMinLength = 43 + PKCECodeVerifierMaxLength = 128 +) + +type AuthData struct { + ClientId string `json:"client_id"` + UserId string `json:"user_id"` + Code string `json:"code"` + ExpiresIn int32 `json:"expires_in"` + CreateAt int64 `json:"create_at"` + RedirectUri string `json:"redirect_uri"` + State string `json:"state"` + Scope string `json:"scope"` + CodeChallenge string `json:"code_challenge,omitempty"` + CodeChallengeMethod string `json:"code_challenge_method,omitempty"` + Resource string `json:"resource,omitempty"` +} + +type AuthorizeRequest struct { + ResponseType string `json:"response_type"` + ClientId string `json:"client_id"` + RedirectURI string `json:"redirect_uri"` + Scope string `json:"scope"` + State string `json:"state"` + CodeChallenge string `json:"code_challenge,omitempty"` + CodeChallengeMethod string `json:"code_challenge_method,omitempty"` + Resource string `json:"resource,omitempty"` +} + +// IsValid validates the AuthData and returns an error if it isn't configured +// correctly. 
+func (ad *AuthData) IsValid() *AppError { + if !IsValidId(ad.ClientId) { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.client_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(ad.UserId) { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if ad.Code == "" || len(ad.Code) > 128 { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.auth_code.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + + if ad.ExpiresIn == 0 { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.expires.app_error", nil, "", http.StatusBadRequest) + } + + if ad.CreateAt <= 0 { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.create_at.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + + if len(ad.RedirectUri) > 256 || !IsValidHTTPURL(ad.RedirectUri) { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.redirect_uri.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + + if len(ad.State) > 1024 { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.state.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + + if len(ad.Scope) > 128 { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.scope.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + + // PKCE validation - if one PKCE field is present, both must be present and valid + if ad.CodeChallenge != "" || ad.CodeChallengeMethod != "" { + if err := ad.validatePKCE(); err != nil { + return err + } + } + + // Resource validation per RFC 8707 + if ad.Resource != "" { + if err := ValidateResourceParameter(ad.Resource, ad.ClientId, "AuthData.IsValid"); err != nil { + return err + } + } + + return nil +} + +// IsValid validates the AuthorizeRequest and returns an error if it isn't configured +// correctly. 
+func (ar *AuthorizeRequest) IsValid() *AppError { + if !IsValidId(ar.ClientId) { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.client_id.app_error", nil, "", http.StatusBadRequest) + } + + if ar.ResponseType == "" { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.response_type.app_error", nil, "", http.StatusBadRequest) + } + + if ar.RedirectURI == "" || len(ar.RedirectURI) > 256 || !IsValidHTTPURL(ar.RedirectURI) { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.redirect_uri.app_error", nil, "client_id="+ar.ClientId, http.StatusBadRequest) + } + + if len(ar.State) > 1024 { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.state.app_error", nil, "client_id="+ar.ClientId, http.StatusBadRequest) + } + + if len(ar.Scope) > 128 { + return NewAppError("AuthData.IsValid", "model.authorize.is_valid.scope.app_error", nil, "client_id="+ar.ClientId, http.StatusBadRequest) + } + + // PKCE validation - if one PKCE field is present, both must be present and valid + if ar.CodeChallenge != "" || ar.CodeChallengeMethod != "" { + if err := ar.validatePKCE(); err != nil { + return err + } + } + + // Resource validation per RFC 8707 + if ar.Resource != "" { + if err := ValidateResourceParameter(ar.Resource, ar.ClientId, "AuthorizeRequest.IsValid"); err != nil { + return err + } + } + + return nil +} + +func (ad *AuthData) PreSave() { + if ad.ExpiresIn == 0 { + ad.ExpiresIn = AuthCodeExpireTime + } + + if ad.CreateAt == 0 { + ad.CreateAt = GetMillis() + } + + if ad.Scope == "" { + ad.Scope = DefaultScope + } +} + +func (ad *AuthData) IsExpired() bool { + return GetMillis() > ad.CreateAt+int64(ad.ExpiresIn*1000) +} + +// validatePKCEParameters validates PKCE parameters (shared validation logic) +func validatePKCEParameters(codeChallenge, codeChallengeMethod, clientId, caller string) *AppError { + if codeChallenge == "" { + return NewAppError(caller, "model.authorize.is_valid.code_challenge.app_error", nil, 
"client_id="+clientId, http.StatusBadRequest) + } + + if codeChallengeMethod == "" { + return NewAppError(caller, "model.authorize.is_valid.code_challenge_method.app_error", nil, "client_id="+clientId, http.StatusBadRequest) + } + + // Only support S256 method for security + if codeChallengeMethod != PKCECodeChallengeMethodS256 { + return NewAppError(caller, "model.authorize.is_valid.code_challenge_method.unsupported.app_error", nil, "client_id="+clientId+", method="+codeChallengeMethod, http.StatusBadRequest) + } + + // Validate code challenge format (base64url encoded) + if len(codeChallenge) < PKCECodeChallengeMinLength || len(codeChallenge) > PKCECodeChallengeMaxLength { + return NewAppError(caller, "model.authorize.is_valid.code_challenge.length.app_error", nil, "client_id="+clientId, http.StatusBadRequest) + } + + // Validate base64url format (no padding, URL-safe characters) + if !codeChallengeRegex.MatchString(codeChallenge) { + return NewAppError(caller, "model.authorize.is_valid.code_challenge.format.app_error", nil, "client_id="+clientId, http.StatusBadRequest) + } + + return nil +} + +// validatePKCE validates PKCE parameters for AuthData +func (ad *AuthData) validatePKCE() *AppError { + return validatePKCEParameters(ad.CodeChallenge, ad.CodeChallengeMethod, ad.ClientId, "AuthData.validatePKCE") +} + +// validatePKCE validates PKCE parameters for AuthorizeRequest +func (ar *AuthorizeRequest) validatePKCE() *AppError { + return validatePKCEParameters(ar.CodeChallenge, ar.CodeChallengeMethod, ar.ClientId, "AuthorizeRequest.validatePKCE") +} + +// VerifyPKCE verifies a PKCE code_verifier against the stored code_challenge +func (ad *AuthData) VerifyPKCE(codeVerifier string) bool { + // Both empty = no PKCE was used (backward compatibility) + if ad.CodeChallenge == "" && ad.CodeChallengeMethod == "" { + return true + } + + // Only one empty = invalid data state + if ad.CodeChallenge == "" || ad.CodeChallengeMethod == "" { + return false + } + + // Validate 
code verifier length + if len(codeVerifier) < PKCECodeVerifierMinLength || len(codeVerifier) > PKCECodeVerifierMaxLength { + return false + } + + // Validate code verifier format (unreserved characters from RFC 3986) + if !codeVerifierRegex.MatchString(codeVerifier) { + return false + } + + // Only S256 method is supported + if ad.CodeChallengeMethod != PKCECodeChallengeMethodS256 { + return false + } + + // Calculate S256 challenge: BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) + hash := sha256.Sum256([]byte(codeVerifier)) + calculatedChallenge := base64.RawURLEncoding.EncodeToString(hash[:]) + + return calculatedChallenge == ad.CodeChallenge +} + +// ValidatePKCEForClientType validates PKCE parameters based on OAuth client type and security requirements +func (ad *AuthData) ValidatePKCEForClientType(isPublicClient bool, codeVerifier string) *AppError { + if isPublicClient { + // RFC 7636: Public clients MUST use PKCE + if ad.CodeChallenge == "" { + return NewAppError("AuthData.ValidatePKCEForClientType", "model.authorize.validate_pkce.public_client_required.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + if codeVerifier == "" { + return NewAppError("AuthData.ValidatePKCEForClientType", "model.authorize.validate_pkce.verifier_required.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + // Verify the code verifier matches the stored code challenge + if !ad.VerifyPKCE(codeVerifier) { + return NewAppError("AuthData.ValidatePKCEForClientType", "model.authorize.validate_pkce.verification_failed.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + } else { + // Confidential clients: PKCE is optional but enforced if initiated + if ad.CodeChallenge != "" { + // Client started flow with PKCE - code_verifier is required + if codeVerifier == "" { + return NewAppError("AuthData.ValidatePKCEForClientType", "model.authorize.validate_pkce.verifier_required.app_error", nil, "client_id="+ad.ClientId, 
http.StatusBadRequest) + } + // Verify the code verifier matches the stored code challenge + if !ad.VerifyPKCE(codeVerifier) { + return NewAppError("AuthData.ValidatePKCEForClientType", "model.authorize.validate_pkce.verification_failed.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + } else if codeVerifier != "" { + // Client provided code_verifier but didn't use PKCE in authorization - reject + return NewAppError("AuthData.ValidatePKCEForClientType", "model.authorize.validate_pkce.not_used_in_auth.app_error", nil, "client_id="+ad.ClientId, http.StatusBadRequest) + } + } + + return nil +} + +// ValidateResourceParameter validates a resource parameter per RFC 8707 +func ValidateResourceParameter(resource, clientId, caller string) *AppError { + // Empty resource parameter is allowed (no resource specified) + if resource == "" { + return nil + } + + // Resource must not exceed 512 characters to fit in database column + if len(resource) > 512 { + return NewAppError(caller, "model.authorize.is_valid.resource.length.app_error", nil, "client_id="+clientId, http.StatusBadRequest) + } + + parsedURL, err := url.Parse(resource) + if err != nil { + return NewAppError(caller, "model.authorize.is_valid.resource.invalid_uri.app_error", nil, "client_id="+clientId, http.StatusBadRequest) + } + + // Must be absolute URI (has scheme) + if !parsedURL.IsAbs() { + return NewAppError(caller, "model.authorize.is_valid.resource.not_absolute.app_error", nil, "client_id="+clientId, http.StatusBadRequest) + } + + // Must not include a fragment component per RFC 8707 + if parsedURL.Fragment != "" { + return NewAppError(caller, "model.authorize.is_valid.resource.has_fragment.app_error", nil, "client_id="+clientId, http.StatusBadRequest) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/bot.go b/vendor/github.com/mattermost/mattermost/server/public/model/bot.go new file mode 100644 index 00000000..2370f04b --- /dev/null +++ 
b/vendor/github.com/mattermost/mattermost/server/public/model/bot.go @@ -0,0 +1,230 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "net/http" + "strings" + "unicode/utf8" +) + +const ( + BotDisplayNameMaxRunes = UserFirstNameMaxRunes + BotDescriptionMaxRunes = 1024 + BotCreatorIdMaxRunes = KeyValuePluginIdMaxRunes // UserId or PluginId + BotWarnMetricBotUsername = "mattermost-advisor" + BotSystemBotUsername = "system-bot" +) + +// Bot is a special type of User meant for programmatic interactions. +// Note that the primary key of a bot is the UserId, and matches the primary key of the +// corresponding user. +type Bot struct { + UserId string `json:"user_id"` + Username string `json:"username"` + DisplayName string `json:"display_name,omitempty"` + Description string `json:"description,omitempty"` + OwnerId string `json:"owner_id"` + LastIconUpdate int64 `json:"last_icon_update,omitempty"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` +} + +func (b *Bot) Auditable() map[string]any { + return map[string]any{ + "user_id": b.UserId, + "username": b.Username, + "display_name": b.DisplayName, + "description": b.Description, + "owner_id": b.OwnerId, + "last_icon_update": b.LastIconUpdate, + "create_at": b.CreateAt, + "update_at": b.UpdateAt, + "delete_at": b.DeleteAt, + } +} + +// BotPatch is a description of what fields to update on an existing bot. +type BotPatch struct { + Username *string `json:"username"` + DisplayName *string `json:"display_name"` + Description *string `json:"description"` +} + +func (b *BotPatch) Auditable() map[string]any { + return map[string]any{ + "username": b.Username, + "display_name": b.DisplayName, + "description": b.Description, + } +} + +// BotGetOptions acts as a filter on bulk bot fetching queries. 
+type BotGetOptions struct { + OwnerId string + IncludeDeleted bool + OnlyOrphaned bool + Page int + PerPage int +} + +// BotList is a list of bots. +type BotList []*Bot + +// Trace describes the minimum information required to identify a bot for the purpose of logging. +func (b *Bot) Trace() map[string]any { + return map[string]any{"user_id": b.UserId} +} + +// Clone returns a shallow copy of the bot. +func (b *Bot) Clone() *Bot { + bCopy := *b + return &bCopy +} + +// IsValidCreate validates bot for Create call. This skips validations of fields that are auto-filled on Create +func (b *Bot) IsValidCreate() *AppError { + if !IsValidUsername(b.Username) { + return NewAppError("Bot.IsValid", "model.bot.is_valid.username.app_error", b.Trace(), "", http.StatusBadRequest) + } + + if utf8.RuneCountInString(b.DisplayName) > BotDisplayNameMaxRunes { + return NewAppError("Bot.IsValid", "model.bot.is_valid.user_id.app_error", b.Trace(), "", http.StatusBadRequest) + } + + if utf8.RuneCountInString(b.Description) > BotDescriptionMaxRunes { + return NewAppError("Bot.IsValid", "model.bot.is_valid.description.app_error", b.Trace(), "", http.StatusBadRequest) + } + + if b.OwnerId == "" || utf8.RuneCountInString(b.OwnerId) > BotCreatorIdMaxRunes { + return NewAppError("Bot.IsValid", "model.bot.is_valid.creator_id.app_error", b.Trace(), "", http.StatusBadRequest) + } + + return nil +} + +// IsValid validates the bot and returns an error if it isn't configured correctly. 
+func (b *Bot) IsValid() *AppError { + if !IsValidId(b.UserId) { + return NewAppError("Bot.IsValid", "model.bot.is_valid.user_id.app_error", b.Trace(), "", http.StatusBadRequest) + } + + if b.CreateAt == 0 { + return NewAppError("Bot.IsValid", "model.bot.is_valid.create_at.app_error", b.Trace(), "", http.StatusBadRequest) + } + + if b.UpdateAt == 0 { + return NewAppError("Bot.IsValid", "model.bot.is_valid.update_at.app_error", b.Trace(), "", http.StatusBadRequest) + } + return b.IsValidCreate() +} + +// PreSave should be run before saving a new bot to the database. +func (b *Bot) PreSave() { + b.CreateAt = GetMillis() + b.UpdateAt = b.CreateAt + b.DeleteAt = 0 + b.Username = NormalizeUsername(b.Username) +} + +// PreUpdate should be run before saving an updated bot to the database. +func (b *Bot) PreUpdate() { + b.UpdateAt = GetMillis() +} + +// Etag generates an etag for caching. +func (b *Bot) Etag() string { + return Etag(b.UserId, b.UpdateAt) +} + +// Patch modifies an existing bot with optional fields from the given patch. +// TODO 6.0: consider returning a boolean to indicate whether or not the patch +// applied any changes. +func (b *Bot) Patch(patch *BotPatch) { + if patch.Username != nil { + b.Username = *patch.Username + } + + if patch.DisplayName != nil { + b.DisplayName = *patch.DisplayName + } + + if patch.Description != nil { + b.Description = *patch.Description + } +} + +// WouldPatch returns whether or not the given patch would be applied or not. +func (b *Bot) WouldPatch(patch *BotPatch) bool { + if patch == nil { + return false + } + if patch.Username != nil && *patch.Username != b.Username { + return true + } + if patch.DisplayName != nil && *patch.DisplayName != b.DisplayName { + return true + } + if patch.Description != nil && *patch.Description != b.Description { + return true + } + return false +} + +// UserFromBot returns a user model describing the bot fields stored in the User store. 
+func UserFromBot(b *Bot) *User { + return &User{ + Id: b.UserId, + Username: b.Username, + Email: NormalizeEmail(fmt.Sprintf("%s@localhost", b.Username)), + FirstName: b.DisplayName, + Roles: SystemUserRoleId, + } +} + +// BotFromUser returns a bot model given a user model +func BotFromUser(u *User) *Bot { + return &Bot{ + OwnerId: u.Id, + UserId: u.Id, + Username: u.Username, + DisplayName: u.GetDisplayName(ShowUsername), + } +} + +// Etag computes the etag for a list of bots. +func (l *BotList) Etag() string { + id := "0" + var t int64 + var delta int64 + + for _, v := range *l { + if v.UpdateAt > t { + t = v.UpdateAt + id = v.UserId + } + } + + return Etag(id, t, delta, len(*l)) +} + +// MakeBotNotFoundError creates the error returned when a bot does not exist, or when the user isn't allowed to query the bot. +// The errors must the same in both cases to avoid leaking that a user is a bot. +func MakeBotNotFoundError(where, userId string) *AppError { + return NewAppError(where, "store.sql_bot.get.missing.app_error", map[string]any{"user_id": userId}, "", http.StatusNotFound) +} + +func IsBotDMChannel(channel *Channel, botUserID string) bool { + if channel.Type != ChannelTypeDirect { + return false + } + + if !strings.HasPrefix(channel.Name, botUserID+"__") && !strings.HasSuffix(channel.Name, "__"+botUserID) { + return false + } + + return true +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/builtin.go b/vendor/github.com/mattermost/mattermost/server/public/model/builtin.go new file mode 100644 index 00000000..fa9c053b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/builtin.go @@ -0,0 +1,17 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// NewPointer returns a pointer to the object passed. +func NewPointer[T any](t T) *T { return &t } + +// SafeDereference returns the zero value of T if t is nil. 
+// Otherwise, it returns t dereferenced. +func SafeDereference[T any](t *T) T { + if t == nil { + var t T + return t + } + return *t +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/bulk_export.go b/vendor/github.com/mattermost/mattermost/server/public/model/bulk_export.go new file mode 100644 index 00000000..31d8a850 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/bulk_export.go @@ -0,0 +1,16 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// ExportDataDir is the name of the directory were to store additional data +// included with the export (e.g. file attachments). +const ExportDataDir = "data" + +type BulkExportOpts struct { + IncludeAttachments bool + IncludeProfilePictures bool + IncludeArchivedChannels bool + IncludeRolesAndSchemes bool + CreateArchive bool +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/bundle_info.go b/vendor/github.com/mattermost/mattermost/server/public/model/bundle_info.go new file mode 100644 index 00000000..55c89faa --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/bundle_info.go @@ -0,0 +1,34 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +type BundleInfo struct { + Path string + + Manifest *Manifest + ManifestPath string + ManifestError error +} + +func (b *BundleInfo) WrapLogger(logger *mlog.Logger) *mlog.Logger { + if b.Manifest != nil { + return logger.With(mlog.String("plugin_id", b.Manifest.Id)) + } + return logger.With(mlog.String("plugin_path", b.Path)) +} + +// Returns bundle info for the given path. The return value is never nil. 
+func BundleInfoForPath(path string) *BundleInfo { + m, mpath, err := FindManifest(path) + return &BundleInfo{ + Path: path, + Manifest: m, + ManifestPath: mpath, + ManifestError: err, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/cel.go b/vendor/github.com/mattermost/mattermost/server/public/model/cel.go new file mode 100644 index 00000000..f10d9f55 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/cel.go @@ -0,0 +1,32 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// ValueType indicates whether a value is a literal or another attribute. +type ValueType int + +const ( + LiteralValue ValueType = iota + AttrValue +) + +// Condition represents a single logical condition (e.g., user.attributes.Team == "Engineering"). +type Condition struct { + // Left-hand side attribute selector (e.g., "user.attributes.Team"). + Attribute string `json:"attribute"` + // The comparison operator. + Operator string `json:"operator"` + // Right-hand side value(s). Can be a single value or a slice for 'in'. + Value any `json:"value"` + // Type of the Value (LiteralValue or AttributeValue). Needed for comparisons like user.attr1 == user.attr2. + ValueType ValueType `json:"value_type"` + // Type of the Attribute (e.g., "text", "select", "multiselect"). + AttributeType string `json:"attribute_type"` +} + +// VisualExpression represents a series of conditions combined with logical AND. +type VisualExpression struct { + // Conditions is a list of individual conditions that will be ANDed together. 
+ Conditions []Condition `json:"conditions"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel.go new file mode 100644 index 00000000..615bbc75 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel.go @@ -0,0 +1,520 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "crypto/sha1" + "database/sql/driver" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "net/http" + "regexp" + "sort" + "strings" + "unicode/utf8" +) + +var ( + // Validates both 3-digit (#RGB) and 6-digit (#RRGGBB) hex colors + channelHexColorRegex = regexp.MustCompile(`^#([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$`) +) + +type ChannelType string + +const ( + ChannelTypeOpen ChannelType = "O" + ChannelTypePrivate ChannelType = "P" + ChannelTypeDirect ChannelType = "D" + ChannelTypeGroup ChannelType = "G" + + ChannelGroupMaxUsers = 8 + ChannelGroupMinUsers = 3 + DefaultChannelName = "town-square" + ChannelDisplayNameMaxRunes = 64 + ChannelNameMinLength = 1 + ChannelNameMaxLength = 64 + ChannelHeaderMaxRunes = 1024 + ChannelPurposeMaxRunes = 250 + ChannelCacheSize = 25000 + ChannelBannerInfoMaxLength = 1024 + + ChannelSortByUsername = "username" + ChannelSortByStatus = "status" +) + +type ChannelBannerInfo struct { + Enabled *bool `json:"enabled"` + Text *string `json:"text"` + BackgroundColor *string `json:"background_color"` +} + +func (c *ChannelBannerInfo) Scan(value any) error { + if value == nil { + return nil + } + + b, ok := value.([]byte) + if !ok { + return fmt.Errorf("expected []byte, got %T", value) + } + + return json.Unmarshal(b, c) +} + +func (c ChannelBannerInfo) Value() (driver.Value, error) { + if c == (ChannelBannerInfo{}) { + return nil, nil + } + + j, err := json.Marshal(c) + if err != nil { + return nil, err + } + return string(j), nil +} + +type Channel 
struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + TeamId string `json:"team_id"` + Type ChannelType `json:"type"` + DisplayName string `json:"display_name"` + Name string `json:"name"` + Header string `json:"header"` + Purpose string `json:"purpose"` + LastPostAt int64 `json:"last_post_at"` + TotalMsgCount int64 `json:"total_msg_count"` + ExtraUpdateAt int64 `json:"extra_update_at"` + CreatorId string `json:"creator_id"` + SchemeId *string `json:"scheme_id"` + Props map[string]any `json:"props"` + GroupConstrained *bool `json:"group_constrained"` + Shared *bool `json:"shared"` + TotalMsgCountRoot int64 `json:"total_msg_count_root"` + PolicyID *string `json:"policy_id"` + LastRootPostAt int64 `json:"last_root_post_at"` + BannerInfo *ChannelBannerInfo `json:"banner_info"` + PolicyEnforced bool `json:"policy_enforced"` + DefaultCategoryName string `json:"default_category_name"` +} + +func (o *Channel) Auditable() map[string]any { + return map[string]any{ + "create_at": o.CreateAt, + "creator_id": o.CreatorId, + "delete_at": o.DeleteAt, + "extra_group_at": o.ExtraUpdateAt, + "group_constrained": o.GroupConstrained, + "id": o.Id, + "last_post_at": o.LastPostAt, + "last_root_post_at": o.LastRootPostAt, + "policy_id": o.PolicyID, + "props": o.Props, + "scheme_id": o.SchemeId, + "shared": o.Shared, + "team_id": o.TeamId, + "total_msg_count_root": o.TotalMsgCountRoot, + "type": o.Type, + "update_at": o.UpdateAt, + "policy_enforced": o.PolicyEnforced, + } +} + +func (o *Channel) LogClone() any { + return o.Auditable() +} + +type ChannelWithTeamData struct { + Channel + TeamDisplayName string `json:"team_display_name"` + TeamName string `json:"team_name"` + TeamUpdateAt int64 `json:"team_update_at"` +} + +type ChannelsWithCount struct { + Channels ChannelListWithTeamData `json:"channels"` + TotalCount int64 `json:"total_count"` +} + +type ChannelPatch struct { + DisplayName *string 
`json:"display_name"` + Name *string `json:"name"` + Header *string `json:"header"` + Purpose *string `json:"purpose"` + GroupConstrained *bool `json:"group_constrained"` + BannerInfo *ChannelBannerInfo `json:"banner_info"` +} + +func (c *ChannelPatch) Auditable() map[string]any { + return map[string]any{ + "header": c.Header, + "group_constrained": c.GroupConstrained, + "purpose": c.Purpose, + } +} + +type ChannelForExport struct { + Channel + TeamName string + SchemeName *string +} + +type DirectChannelForExport struct { + Channel + Members []*ChannelMemberForExport +} + +type ChannelModeration struct { + Name string `json:"name"` + Roles *ChannelModeratedRoles `json:"roles"` +} + +type ChannelModeratedRoles struct { + Guests *ChannelModeratedRole `json:"guests"` + Members *ChannelModeratedRole `json:"members"` +} + +type ChannelModeratedRole struct { + Value bool `json:"value"` + Enabled bool `json:"enabled"` +} + +type ChannelModerationPatch struct { + Name *string `json:"name"` + Roles *ChannelModeratedRolesPatch `json:"roles"` +} + +func (c *ChannelModerationPatch) Auditable() map[string]any { + return map[string]any{ + "name": c.Name, + "roles": c.Roles, + } +} + +type ChannelModeratedRolesPatch struct { + Guests *bool `json:"guests"` + Members *bool `json:"members"` +} + +// ChannelSearchOpts contains options for searching channels. +// +// NotAssociatedToGroup will exclude channels that have associated, active GroupChannels records. +// ExcludeDefaultChannels will exclude the configured default channels (ex 'town-square' and 'off-topic'). +// IncludeDeleted will include channel records where DeleteAt != 0. +// ExcludeChannelNames will exclude channels from the results by name. +// IncludeSearchById will include searching matches against channel IDs in the results +// Paginate whether to paginate the results. +// Page page requested, if results are paginated. +// PerPage number of results per page, if paginated. 
+// ExcludeAccessPolicyEnforced will exclude channels that are enforced by an access policy. +type ChannelSearchOpts struct { + NotAssociatedToGroup string + ExcludeDefaultChannels bool + IncludeDeleted bool // If true, deleted channels will be included in the results. + Deleted bool + ExcludeChannelNames []string + TeamIds []string + GroupConstrained bool + ExcludeGroupConstrained bool + PolicyID string + ExcludePolicyConstrained bool + IncludePolicyID bool + IncludeSearchById bool + ExcludeRemote bool + Public bool + Private bool + Page *int + PerPage *int + LastDeleteAt int // When combined with IncludeDeleted, only channels deleted after this time will be returned. + LastUpdateAt int + AccessControlPolicyEnforced bool + ExcludeAccessControlPolicyEnforced bool + ParentAccessControlPolicyId string +} + +type ChannelMemberCountByGroup struct { + GroupId string `json:"group_id"` + ChannelMemberCount int64 `json:"channel_member_count"` + ChannelMemberTimezonesCount int64 `json:"channel_member_timezones_count"` +} + +type ChannelOption func(channel *Channel) + +var gmNameRegex = regexp.MustCompile("^[a-f0-9]{40}$") + +func WithID(ID string) ChannelOption { + return func(channel *Channel) { + channel.Id = ID + } +} + +func (o *Channel) DeepCopy() *Channel { + cCopy := *o + if cCopy.SchemeId != nil { + cCopy.SchemeId = NewPointer(*o.SchemeId) + } + return &cCopy +} + +func (o *Channel) Etag() string { + return Etag(o.Id, o.UpdateAt) +} + +func (o *Channel) IsValid() *AppError { + if !IsValidId(o.Id) { + return NewAppError("Channel.IsValid", "model.channel.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("Channel.IsValid", "model.channel.is_valid.create_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.UpdateAt == 0 { + return NewAppError("Channel.IsValid", "model.channel.is_valid.update_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if utf8.RuneCountInString(o.DisplayName) > 
ChannelDisplayNameMaxRunes { + return NewAppError("Channel.IsValid", "model.channel.is_valid.display_name.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !IsValidChannelIdentifier(o.Name) { + return NewAppError("Channel.IsValid", "model.channel.is_valid.1_or_more.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !(o.Type == ChannelTypeOpen || o.Type == ChannelTypePrivate || o.Type == ChannelTypeDirect || o.Type == ChannelTypeGroup) { + return NewAppError("Channel.IsValid", "model.channel.is_valid.type.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if utf8.RuneCountInString(o.Header) > ChannelHeaderMaxRunes { + return NewAppError("Channel.IsValid", "model.channel.is_valid.header.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if utf8.RuneCountInString(o.Purpose) > ChannelPurposeMaxRunes { + return NewAppError("Channel.IsValid", "model.channel.is_valid.purpose.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if len(o.CreatorId) > 26 { + return NewAppError("Channel.IsValid", "model.channel.is_valid.creator_id.app_error", nil, "", http.StatusBadRequest) + } + + if o.Type != ChannelTypeDirect && o.Type != ChannelTypeGroup { + userIds := strings.Split(o.Name, "__") + if ok := gmNameRegex.MatchString(o.Name); ok || (o.Type != ChannelTypeDirect && len(userIds) == 2 && IsValidId(userIds[0]) && IsValidId(userIds[1])) { + return NewAppError("Channel.IsValid", "model.channel.is_valid.name.app_error", nil, "", http.StatusBadRequest) + } + } + + if o.BannerInfo != nil && o.BannerInfo.Enabled != nil && *o.BannerInfo.Enabled { + if o.Type != ChannelTypeOpen && o.Type != ChannelTypePrivate { + return NewAppError("Channel.IsValid", "model.channel.is_valid.banner_info.channel_type.app_error", nil, "", http.StatusBadRequest) + } + + if o.BannerInfo.Text == nil || len(*o.BannerInfo.Text) == 0 { + return NewAppError("Channel.IsValid", "model.channel.is_valid.banner_info.text.empty.app_error", nil, "", http.StatusBadRequest) 
+ } else if len(*o.BannerInfo.Text) > ChannelBannerInfoMaxLength { + return NewAppError("Channel.IsValid", "model.channel.is_valid.banner_info.text.invalid_length.app_error", map[string]any{"maxLength": ChannelBannerInfoMaxLength}, "", http.StatusBadRequest) + } + + if o.BannerInfo.BackgroundColor == nil || len(*o.BannerInfo.BackgroundColor) == 0 { + return NewAppError("Channel.IsValid", "model.channel.is_valid.banner_info.background_color.empty.app_error", nil, "", http.StatusBadRequest) + } + + if !channelHexColorRegex.MatchString(*o.BannerInfo.BackgroundColor) { + return NewAppError("Channel.IsValid", "model.channel.is_valid.banner_info.background_color.invalid.app_error", nil, "", http.StatusBadRequest) + } + } + + return nil +} + +func (o *Channel) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + o.Name = SanitizeUnicode(o.Name) + o.DisplayName = SanitizeUnicode(o.DisplayName) + if o.CreateAt == 0 { + o.CreateAt = GetMillis() + } + o.UpdateAt = o.CreateAt + o.ExtraUpdateAt = 0 +} + +func (o *Channel) PreUpdate() { + o.UpdateAt = GetMillis() + o.Name = SanitizeUnicode(o.Name) + o.DisplayName = SanitizeUnicode(o.DisplayName) +} + +func (o *Channel) IsGroupOrDirect() bool { + return o.Type == ChannelTypeDirect || o.Type == ChannelTypeGroup +} + +func (o *Channel) IsOpen() bool { + return o.Type == ChannelTypeOpen +} + +func (o *Channel) Patch(patch *ChannelPatch) { + if patch.DisplayName != nil { + o.DisplayName = strings.TrimSpace(*patch.DisplayName) + } + + if patch.Name != nil { + o.Name = *patch.Name + } + + if patch.Header != nil { + o.Header = *patch.Header + } + + if patch.Purpose != nil { + o.Purpose = *patch.Purpose + } + + if patch.GroupConstrained != nil { + o.GroupConstrained = patch.GroupConstrained + } + + // patching channel banner info + if patch.BannerInfo != nil { + if o.BannerInfo == nil { + o.BannerInfo = &ChannelBannerInfo{} + } + + if patch.BannerInfo.Enabled != nil { + o.BannerInfo.Enabled = patch.BannerInfo.Enabled + } + + if 
patch.BannerInfo.Text != nil { + o.BannerInfo.Text = patch.BannerInfo.Text + } + + if patch.BannerInfo.BackgroundColor != nil { + o.BannerInfo.BackgroundColor = patch.BannerInfo.BackgroundColor + } + } +} + +func (o *Channel) MakeNonNil() { + if o.Props == nil { + o.Props = make(map[string]any) + } +} + +func (o *Channel) AddProp(key string, value any) { + o.MakeNonNil() + + o.Props[key] = value +} + +func (o *Channel) IsGroupConstrained() bool { + return o.GroupConstrained != nil && *o.GroupConstrained +} + +func (o *Channel) IsShared() bool { + return o.Shared != nil && *o.Shared +} + +func (o *Channel) GetOtherUserIdForDM(userId string) string { + user1, user2 := o.GetBothUsersForDM() + + if user2 == "" { + return "" + } + + if user1 == userId { + return user2 + } + + return user1 +} + +func (o *Channel) GetBothUsersForDM() (string, string) { + if o.Type != ChannelTypeDirect { + return "", "" + } + + userIds := strings.Split(o.Name, "__") + if len(userIds) != 2 { + return "", "" + } + + if userIds[0] == userIds[1] { + return userIds[0], "" + } + + return userIds[0], userIds[1] +} + +func (o *Channel) Sanitize() Channel { + return Channel{ + Id: o.Id, + TeamId: o.TeamId, + Type: o.Type, + DisplayName: o.DisplayName, + } +} + +func (t ChannelType) MarshalJSON() ([]byte, error) { + return json.Marshal(string(t)) +} + +func GetDMNameFromIds(userId1, userId2 string) string { + if userId1 > userId2 { + return userId2 + "__" + userId1 + } + return userId1 + "__" + userId2 +} + +func GetGroupDisplayNameFromUsers(users []*User, truncate bool) string { + usernames := make([]string, len(users)) + for index, user := range users { + usernames[index] = user.Username + } + + sort.Strings(usernames) + + name := strings.Join(usernames, ", ") + + if truncate && len(name) > ChannelNameMaxLength { + name = name[:ChannelNameMaxLength] + } + + return name +} + +func GetGroupNameFromUserIds(userIds []string) string { + sort.Strings(userIds) + + h := sha1.New() + for _, id := range 
userIds { + io.WriteString(h, id) + } + + return hex.EncodeToString(h.Sum(nil)) +} + +type GroupMessageConversionRequestBody struct { + ChannelID string `json:"channel_id"` + TeamID string `json:"team_id"` + Name string `json:"name"` + DisplayName string `json:"display_name"` +} + +// ChannelMembersGetOptions provides parameters for getting channel members +type ChannelMembersGetOptions struct { + // ChannelID specifies which channel to get members for + ChannelID string + // Offset for pagination + Offset int + // Limit for pagination (maximum number of results to return) + Limit int + // UpdatedAfter filters members updated after the given timestamp (cursor-based pagination) + UpdatedAfter int64 +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_bookmark.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_bookmark.go new file mode 100644 index 00000000..3b707a35 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_bookmark.go @@ -0,0 +1,336 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" + "strings" + "unicode/utf8" +) + +type ChannelBookmarkType string + +const ( + ChannelBookmarkLink ChannelBookmarkType = "link" + ChannelBookmarkFile ChannelBookmarkType = "file" + BookmarkFileOwner = "bookmark" + MaxBookmarksPerChannel = 50 + DisplayNameMaxRunes = 64 + LinkMaxRunes = 1024 +) + +type ChannelBookmark struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + ChannelId string `json:"channel_id"` + OwnerId string `json:"owner_id"` + FileId string `json:"file_id"` + DisplayName string `json:"display_name"` + SortOrder int64 `json:"sort_order"` + LinkUrl string `json:"link_url,omitempty"` + ImageUrl string `json:"image_url,omitempty"` + Emoji string `json:"emoji,omitempty"` + Type ChannelBookmarkType `json:"type"` + OriginalId string `json:"original_id,omitempty"` + ParentId string `json:"parent_id,omitempty"` +} + +func (o *ChannelBookmark) Auditable() map[string]any { + return map[string]any{ + "id": o.Id, + "create_at": o.CreateAt, + "update_at": o.UpdateAt, + "delete_at": o.DeleteAt, + "channel_id": o.ChannelId, + "owner_id": o.OwnerId, + "file_id": o.FileId, + "type": o.Type, + "original_id": o.OriginalId, + "parent_id": o.ParentId, + } +} + +// Clone returns a shallow copy of the channel bookmark. 
+func (o *ChannelBookmark) Clone() *ChannelBookmark { + bCopy := *o + return &bCopy +} + +// SetOriginal generates a new bookmark copying the data of the +// receiver bookmark, resets its timestamps and main ID, updates its +// OriginalId and sets the owner to the ID passed as a parameter +func (o *ChannelBookmark) SetOriginal(newOwnerId string) *ChannelBookmark { + bCopy := *o + bCopy.Id = "" + bCopy.CreateAt = 0 + bCopy.DeleteAt = 0 + bCopy.UpdateAt = 0 + bCopy.OriginalId = o.Id + bCopy.OwnerId = newOwnerId + return &bCopy +} + +func (o *ChannelBookmark) IsValid() *AppError { + if !IsValidId(o.Id) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.create_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.UpdateAt == 0 { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.update_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !IsValidId(o.ChannelId) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.channel_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.OwnerId) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.owner_id.app_error", nil, "", http.StatusBadRequest) + } + + if o.DisplayName == "" || utf8.RuneCountInString(o.DisplayName) > DisplayNameMaxRunes { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.display_name.app_error", nil, "", http.StatusBadRequest) + } + + if !(o.Type == ChannelBookmarkFile || o.Type == ChannelBookmarkLink) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.type.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.Type == ChannelBookmarkLink && o.FileId != "" { + return NewAppError("ChannelBookmark.IsValid", 
"model.channel_bookmark.is_valid.file_id.missing_or_invalid.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.Type == ChannelBookmarkFile && o.LinkUrl != "" { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.link_url.missing_or_invalid.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.Type == ChannelBookmarkLink && (o.LinkUrl == "" || !IsValidHTTPURL(o.LinkUrl) || utf8.RuneCountInString(o.LinkUrl) > LinkMaxRunes) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.link_url.missing_or_invalid.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.Type == ChannelBookmarkLink && o.ImageUrl != "" && (!IsValidHTTPURL(o.ImageUrl) || utf8.RuneCountInString(o.ImageUrl) > LinkMaxRunes) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.image_url.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.Type == ChannelBookmarkFile && (o.FileId == "" || !IsValidId(o.FileId)) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.file_id.missing_or_invalid.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.ImageUrl != "" && o.FileId != "" { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.link_file.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.OriginalId != "" && !IsValidId(o.OriginalId) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.original_id.app_error", nil, "", http.StatusBadRequest) + } + + if o.ParentId != "" && !IsValidId(o.ParentId) { + return NewAppError("ChannelBookmark.IsValid", "model.channel_bookmark.is_valid.parent_id.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (o *ChannelBookmark) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + o.DisplayName = SanitizeUnicode(o.DisplayName) + o.Emoji = strings.Trim(o.Emoji, ":") + if o.CreateAt == 0 { + o.CreateAt = 
GetMillis() + } + o.UpdateAt = o.CreateAt +} + +func (o *ChannelBookmark) PreUpdate() { + o.UpdateAt = GetMillis() + o.DisplayName = SanitizeUnicode(o.DisplayName) + o.Emoji = strings.Trim(o.Emoji, ":") +} + +func (o *ChannelBookmark) ToBookmarkWithFileInfo(f *FileInfo) *ChannelBookmarkWithFileInfo { + bwf := ChannelBookmarkWithFileInfo{ + ChannelBookmark: &ChannelBookmark{ + Id: o.Id, + CreateAt: o.CreateAt, + UpdateAt: o.UpdateAt, + DeleteAt: o.DeleteAt, + ChannelId: o.ChannelId, + OwnerId: o.OwnerId, + FileId: o.FileId, + DisplayName: o.DisplayName, + SortOrder: o.SortOrder, + LinkUrl: o.LinkUrl, + ImageUrl: o.ImageUrl, + Emoji: strings.Trim(o.Emoji, ":"), + Type: o.Type, + OriginalId: o.OriginalId, + ParentId: o.ParentId, + }, + } + + if f != nil && f.Id != "" { + bwf.FileInfo = f + } + + return &bwf +} + +type ChannelBookmarkPatch struct { + FileId *string `json:"file_id"` + DisplayName *string `json:"display_name"` + SortOrder *int64 `json:"sort_order"` + LinkUrl *string `json:"link_url,omitempty"` + ImageUrl *string `json:"image_url,omitempty"` + Emoji *string `json:"emoji,omitempty"` +} + +func (o *ChannelBookmarkPatch) Auditable() map[string]any { + return map[string]any{ + "file_id": o.FileId, + } +} + +func (o *ChannelBookmark) Patch(patch *ChannelBookmarkPatch) { + if patch.FileId != nil { + o.FileId = *patch.FileId + } + + if patch.DisplayName != nil { + o.DisplayName = *patch.DisplayName + } + if patch.SortOrder != nil { + o.SortOrder = *patch.SortOrder + } + if patch.LinkUrl != nil { + o.LinkUrl = *patch.LinkUrl + } + if patch.ImageUrl != nil { + o.ImageUrl = *patch.ImageUrl + } + if patch.Emoji != nil { + o.Emoji = *patch.Emoji + } +} + +type ChannelBookmarkWithFileInfo struct { + *ChannelBookmark + FileInfo *FileInfo `json:"file,omitempty"` +} + +func (o *ChannelBookmarkWithFileInfo) Auditable() map[string]any { + a := o.ChannelBookmark.Auditable() + if o.FileInfo != nil { + a["file"] = o.FileInfo.Auditable() + } + + return a +} + +// Clone returns 
a shallow copy of the channel bookmark with file info. +func (o *ChannelBookmarkWithFileInfo) Clone() *ChannelBookmarkWithFileInfo { + bCopy := *o + return &bCopy +} + +type ChannelWithBookmarks struct { + *Channel + Bookmarks []*ChannelBookmarkWithFileInfo `json:"bookmarks,omitempty"` +} + +type ChannelWithTeamDataAndBookmarks struct { + *ChannelWithTeamData + Bookmarks []*ChannelBookmarkWithFileInfo `json:"bookmarks,omitempty"` +} + +type UpdateChannelBookmarkResponse struct { + Updated *ChannelBookmarkWithFileInfo `json:"updated,omitempty"` + Deleted *ChannelBookmarkWithFileInfo `json:"deleted,omitempty"` +} + +func (o *UpdateChannelBookmarkResponse) Auditable() map[string]any { + a := map[string]any{} + if o.Updated != nil { + a["updated"] = o.Updated.Auditable() + } + if o.Deleted != nil { + a["updated"] = o.Deleted.Auditable() + } + return a +} + +type ChannelBookmarkAndFileInfo struct { + Id string + CreateAt int64 + UpdateAt int64 + DeleteAt int64 + ChannelId string + OwnerId string + FileInfoId string + DisplayName string + SortOrder int64 + LinkUrl string + ImageUrl string + Emoji string + Type ChannelBookmarkType + OriginalId string + ParentId string + FileId string + FileName string + Extension string + Size int64 + MimeType string + Width int + Height int + HasPreviewImage bool + MiniPreview *[]byte +} + +func (o *ChannelBookmarkAndFileInfo) ToChannelBookmarkWithFileInfo() *ChannelBookmarkWithFileInfo { + bwf := &ChannelBookmarkWithFileInfo{ + ChannelBookmark: &ChannelBookmark{ + Id: o.Id, + CreateAt: o.CreateAt, + UpdateAt: o.UpdateAt, + DeleteAt: o.DeleteAt, + ChannelId: o.ChannelId, + OwnerId: o.OwnerId, + FileId: o.FileInfoId, + DisplayName: o.DisplayName, + SortOrder: o.SortOrder, + LinkUrl: o.LinkUrl, + ImageUrl: o.ImageUrl, + Emoji: o.Emoji, + Type: o.Type, + OriginalId: o.OriginalId, + ParentId: o.ParentId, + }, + } + + if o.FileInfoId != "" && o.FileId != "" { + miniPreview := o.MiniPreview + if len(*miniPreview) == 0 { + miniPreview = nil + } 
+ bwf.FileInfo = &FileInfo{ + Id: o.FileId, + Name: o.FileName, + Extension: o.Extension, + Size: o.Size, + MimeType: o.MimeType, + Width: o.Width, + Height: o.Height, + HasPreviewImage: o.HasPreviewImage, + MiniPreview: miniPreview, + } + } + return bwf +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_count.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_count.go new file mode 100644 index 00000000..4da39083 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_count.go @@ -0,0 +1,43 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "crypto/md5" + "fmt" + "sort" + "strconv" + "strings" +) + +type ChannelCounts struct { + Counts map[string]int64 `json:"counts"` + CountsRoot map[string]int64 `json:"counts_root"` + UpdateTimes map[string]int64 `json:"update_times"` +} + +func (o *ChannelCounts) Etag() string { + // we don't include CountsRoot in ETag calculation, since it's a derivative + ids := []string{} + for id := range o.Counts { + ids = append(ids, id) + } + sort.Strings(ids) + + var str strings.Builder + for _, id := range ids { + str.WriteString(id + strconv.FormatInt(o.Counts[id], 10)) + } + + md5Counts := fmt.Sprintf("%x", md5.Sum([]byte(str.String()))) + + var update int64 + for _, u := range o.UpdateTimes { + if u > update { + update = u + } + } + + return Etag(md5Counts, update) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_data.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_data.go new file mode 100644 index 00000000..9d8b9aca --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_data.go @@ -0,0 +1,18 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type ChannelData struct { + Channel *Channel `json:"channel"` + Member *ChannelMember `json:"member"` +} + +func (o *ChannelData) Etag() string { + var mt int64 + if o.Member != nil { + mt = o.Member.LastUpdateAt + } + + return Etag(o.Channel.Id, o.Channel.UpdateAt, o.Channel.LastPostAt, mt) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_list.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_list.go new file mode 100644 index 00000000..a343b977 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_list.go @@ -0,0 +1,53 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type ChannelList []*Channel + +func (o *ChannelList) Etag() string { + id := "0" + var t int64 + var delta int64 + + for _, v := range *o { + if v.LastPostAt > t { + t = v.LastPostAt + id = v.Id + } + + if v.UpdateAt > t { + t = v.UpdateAt + id = v.Id + } + } + + return Etag(id, t, delta, len(*o)) +} + +type ChannelListWithTeamData []*ChannelWithTeamData + +func (o *ChannelListWithTeamData) Etag() string { + id := "0" + var t int64 + var delta int64 + + for _, v := range *o { + if v.LastPostAt > t { + t = v.LastPostAt + id = v.Id + } + + if v.UpdateAt > t { + t = v.UpdateAt + id = v.Id + } + + if v.TeamUpdateAt > t { + t = v.TeamUpdateAt + id = v.Id + } + } + + return Etag(id, t, delta, len(*o)) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_member.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_member.go new file mode 100644 index 00000000..da854db0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_member.go @@ -0,0 +1,255 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" + "strings" + "unicode/utf8" +) + +const ( + ChannelNotifyDefault = "default" + ChannelNotifyAll = "all" + ChannelNotifyMention = "mention" + ChannelNotifyNone = "none" + ChannelMarkUnreadAll = "all" + ChannelMarkUnreadMention = "mention" + IgnoreChannelMentionsDefault = "default" + IgnoreChannelMentionsOff = "off" + IgnoreChannelMentionsOn = "on" + IgnoreChannelMentionsNotifyProp = "ignore_channel_mentions" + ChannelAutoFollowThreadsOff = "off" + ChannelAutoFollowThreadsOn = "on" + ChannelAutoFollowThreads = "channel_auto_follow_threads" + ChannelMemberNotifyPropsMaxRunes = 800000 +) + +type ChannelUnread struct { + TeamId string `json:"team_id"` + ChannelId string `json:"channel_id"` + MsgCount int64 `json:"msg_count"` + MentionCount int64 `json:"mention_count"` + MentionCountRoot int64 `json:"mention_count_root"` + UrgentMentionCount int64 `json:"urgent_mention_count"` + MsgCountRoot int64 `json:"msg_count_root"` + NotifyProps StringMap `json:"-"` +} + +type ChannelUnreadAt struct { + TeamId string `json:"team_id"` + UserId string `json:"user_id"` + ChannelId string `json:"channel_id"` + MsgCount int64 `json:"msg_count"` + MentionCount int64 `json:"mention_count"` + MentionCountRoot int64 `json:"mention_count_root"` + UrgentMentionCount int64 `json:"urgent_mention_count"` + MsgCountRoot int64 `json:"msg_count_root"` + LastViewedAt int64 `json:"last_viewed_at"` + NotifyProps StringMap `json:"-"` +} + +type ChannelMember struct { + ChannelId string `json:"channel_id"` + UserId string `json:"user_id"` + Roles string `json:"roles"` + LastViewedAt int64 `json:"last_viewed_at"` + MsgCount int64 `json:"msg_count"` + MentionCount int64 `json:"mention_count"` + MentionCountRoot int64 `json:"mention_count_root"` + UrgentMentionCount int64 `json:"urgent_mention_count"` + MsgCountRoot int64 `json:"msg_count_root"` + NotifyProps StringMap `json:"notify_props"` + LastUpdateAt int64 `json:"last_update_at"` + SchemeGuest bool 
`json:"scheme_guest"` + SchemeUser bool `json:"scheme_user"` + SchemeAdmin bool `json:"scheme_admin"` + ExplicitRoles string `json:"explicit_roles"` +} + +func (o *ChannelMember) Auditable() map[string]any { + return map[string]any{ + "channel_id": o.ChannelId, + "user_id": o.UserId, + "roles": o.Roles, + "last_viewed_at": o.LastViewedAt, + "msg_count": o.MsgCount, + "mention_count": o.MentionCount, + "mention_count_root": o.MentionCountRoot, + "urgent_mention_count": o.UrgentMentionCount, + "msg_count_root": o.MsgCountRoot, + "notify_props": o.NotifyProps, + "last_update_at": o.LastUpdateAt, + "scheme_guest": o.SchemeGuest, + "scheme_user": o.SchemeUser, + "scheme_admin": o.SchemeAdmin, + "explicit_roles": o.ExplicitRoles, + } +} + +// SanitizeForCurrentUser sanitizes channel member data based on whether +// it's the current user's own membership or another user's membership +func (o *ChannelMember) SanitizeForCurrentUser(currentUserId string) { + // If this is not the current user's own membership, + // sanitize sensitive timestamp fields + if o.UserId != currentUserId { + o.LastViewedAt = -1 + o.LastUpdateAt = -1 + } +} + +// ChannelMemberWithTeamData contains ChannelMember appended with extra team information +// as well. +type ChannelMemberWithTeamData struct { + ChannelMember + TeamDisplayName string `json:"team_display_name"` + TeamName string `json:"team_name"` + TeamUpdateAt int64 `json:"team_update_at"` +} + +type ChannelMembers []ChannelMember + +type ChannelMembersWithTeamData []ChannelMemberWithTeamData + +type ChannelMemberForExport struct { + ChannelMember + ChannelName string + Username string +} + +type ChannelMemberCursor struct { + Page int // If page is -1, then FromChannelID is used as a cursor. 
+ PerPage int + FromChannelID string +} + +func (o *ChannelMember) IsValid() *AppError { + if !IsValidId(o.ChannelId) { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.channel_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.UserId) { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if appErr := IsChannelMemberNotifyPropsValid(o.NotifyProps, false); appErr != nil { + return appErr + } + + if len(o.Roles) > UserRolesMaxLength { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.roles_limit.app_error", + map[string]any{"Limit": UserRolesMaxLength}, "", http.StatusBadRequest) + } + + return nil +} + +func IsChannelMemberNotifyPropsValid(notifyProps map[string]string, allowMissingFields bool) *AppError { + if notifyLevel, ok := notifyProps[DesktopNotifyProp]; ok || !allowMissingFields { + if len(notifyLevel) > 20 || !IsChannelNotifyLevelValid(notifyLevel) { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.notify_level.app_error", nil, "notify_level="+notifyLevel, http.StatusBadRequest) + } + } + + if markUnreadLevel, ok := notifyProps[MarkUnreadNotifyProp]; ok || !allowMissingFields { + if len(markUnreadLevel) > 20 || !IsChannelMarkUnreadLevelValid(markUnreadLevel) { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.unread_level.app_error", nil, "mark_unread_level="+markUnreadLevel, http.StatusBadRequest) + } + } + + if pushLevel, ok := notifyProps[PushNotifyProp]; ok { + if len(pushLevel) > 20 || !IsChannelNotifyLevelValid(pushLevel) { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.push_level.app_error", nil, "push_notification_level="+pushLevel, http.StatusBadRequest) + } + } + + if sendEmail, ok := notifyProps[EmailNotifyProp]; ok { + if len(sendEmail) > 20 || !IsSendEmailValid(sendEmail) { + return 
NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.email_value.app_error", nil, "push_notification_level="+sendEmail, http.StatusBadRequest) + } + } + + if ignoreChannelMentions, ok := notifyProps[IgnoreChannelMentionsNotifyProp]; ok { + if len(ignoreChannelMentions) > 40 || !IsIgnoreChannelMentionsValid(ignoreChannelMentions) { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.ignore_channel_mentions_value.app_error", nil, "ignore_channel_mentions="+ignoreChannelMentions, http.StatusBadRequest) + } + } + + if channelAutoFollowThreads, ok := notifyProps[ChannelAutoFollowThreads]; ok { + if len(channelAutoFollowThreads) > 3 || !IsChannelAutoFollowThreadsValid(channelAutoFollowThreads) { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.channel_auto_follow_threads_value.app_error", nil, "channel_auto_follow_threads="+channelAutoFollowThreads, http.StatusBadRequest) + } + } + + jsonStringNotifyProps := string(ToJSON(notifyProps)) + if utf8.RuneCountInString(jsonStringNotifyProps) > ChannelMemberNotifyPropsMaxRunes { + return NewAppError("ChannelMember.IsValid", "model.channel_member.is_valid.notify_props.app_error", nil, fmt.Sprint("length=", utf8.RuneCountInString(jsonStringNotifyProps)), http.StatusBadRequest) + } + + return nil +} + +func (o *ChannelMember) PreSave() { + o.LastUpdateAt = GetMillis() +} + +func (o *ChannelMember) PreUpdate() { + o.LastUpdateAt = GetMillis() +} + +func (o *ChannelMember) GetRoles() []string { + return strings.Fields(o.Roles) +} + +func (o *ChannelMember) SetChannelMuted(muted bool) { + if o.IsChannelMuted() { + o.NotifyProps[MarkUnreadNotifyProp] = ChannelMarkUnreadAll + } else { + o.NotifyProps[MarkUnreadNotifyProp] = ChannelMarkUnreadMention + } +} + +func (o *ChannelMember) IsChannelMuted() bool { + return o.NotifyProps[MarkUnreadNotifyProp] == ChannelMarkUnreadMention +} + +func IsChannelNotifyLevelValid(notifyLevel string) bool { + return notifyLevel == 
ChannelNotifyDefault || + notifyLevel == ChannelNotifyAll || + notifyLevel == ChannelNotifyMention || + notifyLevel == ChannelNotifyNone +} + +func IsChannelMarkUnreadLevelValid(markUnreadLevel string) bool { + return markUnreadLevel == ChannelMarkUnreadAll || markUnreadLevel == ChannelMarkUnreadMention +} + +func IsSendEmailValid(sendEmail string) bool { + return sendEmail == ChannelNotifyDefault || sendEmail == "true" || sendEmail == "false" +} + +func IsIgnoreChannelMentionsValid(ignoreChannelMentions string) bool { + return ignoreChannelMentions == IgnoreChannelMentionsOn || ignoreChannelMentions == IgnoreChannelMentionsOff || ignoreChannelMentions == IgnoreChannelMentionsDefault +} + +func IsChannelAutoFollowThreadsValid(channelAutoFollowThreads string) bool { + return channelAutoFollowThreads == ChannelAutoFollowThreadsOn || channelAutoFollowThreads == ChannelAutoFollowThreadsOff +} + +func GetDefaultChannelNotifyProps() StringMap { + return StringMap{ + DesktopNotifyProp: ChannelNotifyDefault, + MarkUnreadNotifyProp: ChannelMarkUnreadAll, + PushNotifyProp: ChannelNotifyDefault, + EmailNotifyProp: ChannelNotifyDefault, + IgnoreChannelMentionsNotifyProp: IgnoreChannelMentionsDefault, + ChannelAutoFollowThreads: ChannelAutoFollowThreadsOff, + } +} + +type ChannelMemberIdentifier struct { + ChannelId string `json:"channel_id"` + UserId string `json:"user_id"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_member_history.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_member_history.go new file mode 100644 index 00000000..b77e0ff9 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_member_history.go @@ -0,0 +1,11 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type ChannelMemberHistory struct { + ChannelId string + UserId string + JoinTime int64 + LeaveTime *int64 +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_member_history_result.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_member_history_result.go new file mode 100644 index 00000000..8f43ca4e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_member_history_result.go @@ -0,0 +1,17 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type ChannelMemberHistoryResult struct { + ChannelId string + UserId string + JoinTime int64 + LeaveTime *int64 + + // these two fields are never set in the database - when we SELECT, we join on Users to get them + UserEmail string `db:"Email"` + Username string + IsBot bool + UserDeleteAt int64 +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_mentions.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_mentions.go new file mode 100644 index 00000000..eb14e8ed --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_mentions.go @@ -0,0 +1,28 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
// channelMentionRegexp matches channel mentions of the form "~channel-name"
// that are not glued to the end of a preceding word.
var channelMentionRegexp = regexp.MustCompile(`\B~[a-zA-Z0-9\-_]+`)

// ChannelMentions returns the channel names mentioned in message (the text
// after each "~"), in order of first appearance and without duplicates.
// It returns nil when the message contains no channel mentions.
func ChannelMentions(message string) []string {
	// Cheap pre-check: skip the regexp entirely when no tilde is present.
	if !strings.Contains(message, "~") {
		return nil
	}

	seen := make(map[string]struct{})
	var names []string
	for _, match := range channelMentionRegexp.FindAllString(message, -1) {
		candidate := match[1:] // strip the leading "~"
		if _, dup := seen[candidate]; dup {
			continue
		}
		seen[candidate] = struct{}{}
		names = append(names, candidate)
	}
	return names
}
a/vendor/github.com/mattermost/mattermost/server/public/model/channel_sidebar.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_sidebar.go new file mode 100644 index 00000000..8547fc32 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_sidebar.go @@ -0,0 +1,97 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "regexp" +) + +type SidebarCategoryType string +type SidebarCategorySorting string + +const ( + // Each sidebar category has a 'type'. System categories are Channels, Favorites and DMs + // All user-created categories will have type Custom + SidebarCategoryChannels SidebarCategoryType = "channels" + SidebarCategoryDirectMessages SidebarCategoryType = "direct_messages" + SidebarCategoryFavorites SidebarCategoryType = "favorites" + SidebarCategoryCustom SidebarCategoryType = "custom" + // Increment to use when adding/reordering things in the sidebar + MinimalSidebarSortDistance = 10 + // Default Sort Orders for categories + DefaultSidebarSortOrderFavorites = 0 + DefaultSidebarSortOrderChannels = DefaultSidebarSortOrderFavorites + MinimalSidebarSortDistance + DefaultSidebarSortOrderDMs = DefaultSidebarSortOrderChannels + MinimalSidebarSortDistance + // Sorting modes + // default for all categories except DMs (behaves like manual) + SidebarCategorySortDefault SidebarCategorySorting = "" + // sort manually + SidebarCategorySortManual SidebarCategorySorting = "manual" + // sort by recency (default for DMs) + SidebarCategorySortRecent SidebarCategorySorting = "recent" + // sort by display name alphabetically + SidebarCategorySortAlphabetical SidebarCategorySorting = "alpha" +) + +// SidebarCategory represents the corresponding DB table +type SidebarCategory struct { + Id string `json:"id"` + UserId string `json:"user_id"` + TeamId string `json:"team_id"` + SortOrder int64 `json:"sort_order"` + 
Sorting SidebarCategorySorting `json:"sorting"` + Type SidebarCategoryType `json:"type"` + DisplayName string `json:"display_name"` + Muted bool `json:"muted"` + Collapsed bool `json:"collapsed"` +} + +// SidebarCategoryWithChannels combines data from SidebarCategory table with the Channel IDs that belong to that category +type SidebarCategoryWithChannels struct { + SidebarCategory + Channels []string `json:"channel_ids"` +} + +func (sc SidebarCategoryWithChannels) ChannelIds() []string { + return sc.Channels +} + +type SidebarCategoryOrder []string + +// OrderedSidebarCategories combines categories, their channel IDs and an array of Category IDs, sorted +type OrderedSidebarCategories struct { + Categories SidebarCategoriesWithChannels `json:"categories"` + Order SidebarCategoryOrder `json:"order"` +} + +type SidebarChannel struct { + ChannelId string `json:"channel_id"` + UserId string `json:"user_id"` + CategoryId string `json:"category_id"` + SortOrder int64 `json:"-"` +} + +type SidebarChannels []*SidebarChannel +type SidebarCategoriesWithChannels []*SidebarCategoryWithChannels + +var categoryIdPattern = regexp.MustCompile("(favorites|channels|direct_messages)_[a-z0-9]{26}_[a-z0-9]{26}") + +func IsValidCategoryId(s string) bool { + // Category IDs can either be regular IDs + if IsValidId(s) { + return true + } + + // Or default categories can follow the pattern {type}_{userID}_{teamID} + return categoryIdPattern.MatchString(s) +} + +func (t SidebarCategoryType) MarshalJSON() ([]byte, error) { + return json.Marshal(string(t)) +} + +func (t SidebarCategorySorting) MarshalJSON() ([]byte, error) { + return json.Marshal(string(t)) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_stats.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_stats.go new file mode 100644 index 00000000..96631c11 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_stats.go @@ -0,0 +1,24 @@ +// 
Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type ChannelStats struct { + ChannelId string `json:"channel_id"` + MemberCount int64 `json:"member_count"` + GuestCount int64 `json:"guest_count"` + PinnedPostCount int64 `json:"pinnedpost_count"` + FilesCount int64 `json:"files_count"` +} + +func (o *ChannelStats) MemberCount_() float64 { + return float64(o.MemberCount) +} + +func (o *ChannelStats) GuestCount_() float64 { + return float64(o.GuestCount) +} + +func (o *ChannelStats) PinnedPostCount_() float64 { + return float64(o.PinnedPostCount) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/channel_view.go b/vendor/github.com/mattermost/mattermost/server/public/model/channel_view.go new file mode 100644 index 00000000..c34c438d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/channel_view.go @@ -0,0 +1,15 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type ChannelView struct { + ChannelId string `json:"channel_id"` + PrevChannelId string `json:"prev_channel_id"` + CollapsedThreadsSupported bool `json:"collapsed_threads_supported"` +} + +type ChannelViewResponse struct { + Status string `json:"status"` + LastViewedAtTimes map[string]int64 `json:"last_viewed_at_times"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/client4.go b/vendor/github.com/mattermost/mattermost/server/public/model/client4.go new file mode 100644 index 00000000..c3aa2a0e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/client4.go @@ -0,0 +1,7742 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "mime" + "mime/multipart" + "net" + "net/http" + "net/url" + "strconv" + "strings" +) + +const ( + HeaderRequestId = "X-Request-ID" + HeaderVersionId = "X-Version-ID" + HeaderClusterId = "X-Cluster-ID" + HeaderEtagServer = "ETag" + HeaderEtagClient = "If-None-Match" + HeaderForwarded = "X-Forwarded-For" + HeaderRealIP = "X-Real-IP" + HeaderForwardedProto = "X-Forwarded-Proto" + HeaderToken = "token" + HeaderCsrfToken = "X-CSRF-Token" + HeaderBearer = "BEARER" + HeaderAuth = "Authorization" + HeaderCloudToken = "X-Cloud-Token" + HeaderRemoteclusterToken = "X-RemoteCluster-Token" + HeaderRemoteclusterId = "X-RemoteCluster-Id" + HeaderRequestedWith = "X-Requested-With" + HeaderRequestedWithXML = "XMLHttpRequest" + HeaderFirstInaccessiblePostTime = "First-Inaccessible-Post-Time" + HeaderFirstInaccessibleFileTime = "First-Inaccessible-File-Time" + HeaderRange = "Range" + STATUS = "status" + StatusOk = "OK" + StatusFail = "FAIL" + StatusUnhealthy = "UNHEALTHY" + StatusRemove = "REMOVE" + ConnectionId = "Connection-Id" + + ClientDir = "client" + + APIURLSuffixV1 = "/api/v1" + APIURLSuffixV4 = "/api/v4" + APIURLSuffixV5 = "/api/v5" + APIURLSuffix = APIURLSuffixV4 +) + +type Response struct { + StatusCode int + RequestId string + Etag string + ServerVersion string + Header http.Header +} + +type Client4 struct { + URL string // The location of the server, for example "http://localhost:8065" + APIURL string // The api location of the server, for example "http://localhost:8065/api/v4" + HTTPClient *http.Client // The http client + AuthToken string + AuthType string + HTTPHeader map[string]string // Headers to be copied over for each request + + // TrueString is the string value sent to the server for true boolean query parameters. + trueString string + + // FalseString is the string value sent to the server for false boolean query parameters. 
+ falseString string +} + +// SetBoolString is a helper method for overriding how true and false query string parameters are +// sent to the server. +// +// This method is only exposed for testing. It is never necessary to configure these values +// in production. +func (c *Client4) SetBoolString(value bool, valueStr string) { + if value { + c.trueString = valueStr + } else { + c.falseString = valueStr + } +} + +// boolString builds the query string parameter for boolean values. +func (c *Client4) boolString(value bool) string { + if value && c.trueString != "" { + return c.trueString + } else if !value && c.falseString != "" { + return c.falseString + } + + if value { + return "true" + } + return "false" +} + +func closeBody(r *http.Response) { + if r.Body != nil { + _, _ = io.Copy(io.Discard, r.Body) + _ = r.Body.Close() + } +} + +func NewAPIv4Client(url string) *Client4 { + url = strings.TrimRight(url, "/") + return &Client4{url, url + APIURLSuffix, &http.Client{}, "", "", map[string]string{}, "", ""} +} + +func NewAPIv4SocketClient(socketPath string) *Client4 { + tr := &http.Transport{ + Dial: func(network, addr string) (net.Conn, error) { + return net.Dial("unix", socketPath) + }, + } + + client := NewAPIv4Client("http://_") + client.HTTPClient = &http.Client{Transport: tr} + + return client +} + +func BuildResponse(r *http.Response) *Response { + if r == nil { + return nil + } + + return &Response{ + StatusCode: r.StatusCode, + RequestId: r.Header.Get(HeaderRequestId), + Etag: r.Header.Get(HeaderEtagServer), + ServerVersion: r.Header.Get(HeaderVersionId), + Header: r.Header, + } +} + +// DecodeJSONFromResponse decodes JSON from an HTTP response and returns the result. +// Handles 304 Not Modified responses and calls [BuildResponse] automatically. 
+func DecodeJSONFromResponse[T any](r *http.Response) (T, *Response, error) { + var result T + if r.StatusCode == http.StatusNotModified { + return result, BuildResponse(r), nil + } + if err := json.NewDecoder(r.Body).Decode(&result); err != nil { + return result, BuildResponse(r), fmt.Errorf("failed to decode JSON response: %w", err) + } + return result, BuildResponse(r), nil +} + +// ReadBytesFromResponse reads all bytes from an HTTP response body and returns them. +// Handles 304 Not Modified responses and calls [BuildResponse] automatically. +func ReadBytesFromResponse(r *http.Response) ([]byte, *Response, error) { + if r.StatusCode == http.StatusNotModified { + return nil, BuildResponse(r), nil + } + data, err := io.ReadAll(r.Body) + if err != nil { + return nil, BuildResponse(r), err + } + return data, BuildResponse(r), nil +} + +func (c *Client4) SetToken(token string) { + c.AuthToken = token + c.AuthType = HeaderBearer +} + +// MockSession is deprecated in favour of SetToken +func (c *Client4) MockSession(token string) { + c.SetToken(token) +} + +func (c *Client4) SetOAuthToken(token string) { + c.AuthToken = token + c.AuthType = HeaderToken +} + +func (c *Client4) ClearOAuthToken() { + c.AuthToken = "" + c.AuthType = HeaderBearer +} + +func (c *Client4) usersRoute() string { + return "/users" +} + +func (c *Client4) reportsRoute() string { + return "/reports" +} + +func (c *Client4) userRoute(userId string) string { + return fmt.Sprintf(c.usersRoute()+"/%v", userId) +} + +func (c *Client4) userThreadsRoute(userID, teamID string) string { + return c.userRoute(userID) + c.teamRoute(teamID) + "/threads" +} + +func (c *Client4) userThreadRoute(userId, teamId, threadId string) string { + return c.userThreadsRoute(userId, teamId) + "/" + threadId +} + +func (c *Client4) userCategoryRoute(userID, teamID string) string { + return c.userRoute(userID) + c.teamRoute(teamID) + "/channels/categories" +} + +func (c *Client4) userAccessTokensRoute() string { + return 
c.usersRoute() + "/tokens" +} + +func (c *Client4) userAccessTokenRoute(tokenId string) string { + return fmt.Sprintf(c.usersRoute()+"/tokens/%v", tokenId) +} + +func (c *Client4) userByUsernameRoute(userName string) string { + return fmt.Sprintf(c.usersRoute()+"/username/%v", userName) +} + +func (c *Client4) userByEmailRoute(email string) string { + return fmt.Sprintf(c.usersRoute()+"/email/%v", email) +} + +func (c *Client4) botsRoute() string { + return "/bots" +} + +func (c *Client4) botRoute(botUserId string) string { + return fmt.Sprintf("%s/%s", c.botsRoute(), botUserId) +} + +func (c *Client4) teamsRoute() string { + return "/teams" +} + +func (c *Client4) teamRoute(teamId string) string { + return fmt.Sprintf(c.teamsRoute()+"/%v", teamId) +} + +func (c *Client4) teamAutoCompleteCommandsRoute(teamId string) string { + return fmt.Sprintf(c.teamsRoute()+"/%v/commands/autocomplete", teamId) +} + +func (c *Client4) teamByNameRoute(teamName string) string { + return fmt.Sprintf(c.teamsRoute()+"/name/%v", teamName) +} + +func (c *Client4) teamMemberRoute(teamId, userId string) string { + return fmt.Sprintf(c.teamRoute(teamId)+"/members/%v", userId) +} + +func (c *Client4) teamMembersRoute(teamId string) string { + return c.teamRoute(teamId) + "/members" +} + +func (c *Client4) teamStatsRoute(teamId string) string { + return c.teamRoute(teamId) + "/stats" +} + +func (c *Client4) teamImportRoute(teamId string) string { + return c.teamRoute(teamId) + "/import" +} + +func (c *Client4) channelsRoute() string { + return "/channels" +} + +func (c *Client4) channelsForTeamRoute(teamId string) string { + return c.teamRoute(teamId) + "/channels" +} + +func (c *Client4) channelRoute(channelId string) string { + return fmt.Sprintf(c.channelsRoute()+"/%v", channelId) +} + +func (c *Client4) channelByNameRoute(channelName, teamId string) string { + return fmt.Sprintf(c.teamRoute(teamId)+"/channels/name/%v", channelName) +} + +func (c *Client4) 
channelsForTeamForUserRoute(teamId, userId string) string { + return c.userRoute(userId) + c.teamRoute(teamId) + "/channels" +} + +func (c *Client4) channelByNameForTeamNameRoute(channelName, teamName string) string { + return fmt.Sprintf(c.teamByNameRoute(teamName)+"/channels/name/%v", channelName) +} + +func (c *Client4) channelMembersRoute(channelId string) string { + return c.channelRoute(channelId) + "/members" +} + +func (c *Client4) channelMemberRoute(channelId, userId string) string { + return fmt.Sprintf(c.channelMembersRoute(channelId)+"/%v", userId) +} + +func (c *Client4) postsRoute() string { + return "/posts" +} + +func (c *Client4) contentFlaggingRoute() string { + return "/content_flagging" +} + +func (c *Client4) postsEphemeralRoute() string { + return "/posts/ephemeral" +} + +func (c *Client4) configRoute() string { + return "/config" +} + +func (c *Client4) licenseRoute() string { + return "/license" +} + +func (c *Client4) postRoute(postId string) string { + return fmt.Sprintf(c.postsRoute()+"/%v", postId) +} + +func (c *Client4) filesRoute() string { + return "/files" +} + +func (c *Client4) fileRoute(fileId string) string { + return fmt.Sprintf(c.filesRoute()+"/%v", fileId) +} + +func (c *Client4) uploadsRoute() string { + return "/uploads" +} + +func (c *Client4) uploadRoute(uploadId string) string { + return fmt.Sprintf("%s/%s", c.uploadsRoute(), uploadId) +} + +func (c *Client4) pluginsRoute() string { + return "/plugins" +} + +func (c *Client4) pluginRoute(pluginId string) string { + return fmt.Sprintf(c.pluginsRoute()+"/%v", pluginId) +} + +func (c *Client4) systemRoute() string { + return "/system" +} + +func (c *Client4) cloudRoute() string { + return "/cloud" +} + +func (c *Client4) testEmailRoute() string { + return "/email/test" +} + +func (c *Client4) testNotificationRoute() string { + return "/notifications/test" +} + +func (c *Client4) usageRoute() string { + return "/usage" +} + +func (c *Client4) testSiteURLRoute() string { + 
return "/site_url/test" +} + +func (c *Client4) testS3Route() string { + return "/file/s3_test" +} + +func (c *Client4) databaseRoute() string { + return "/database" +} + +func (c *Client4) cacheRoute() string { + return "/caches" +} + +func (c *Client4) clusterRoute() string { + return "/cluster" +} + +func (c *Client4) incomingWebhooksRoute() string { + return "/hooks/incoming" +} + +func (c *Client4) incomingWebhookRoute(hookID string) string { + return fmt.Sprintf(c.incomingWebhooksRoute()+"/%v", hookID) +} + +func (c *Client4) complianceReportsRoute() string { + return "/compliance/reports" +} + +func (c *Client4) complianceReportRoute(reportId string) string { + return fmt.Sprintf("%s/%s", c.complianceReportsRoute(), reportId) +} + +func (c *Client4) complianceReportDownloadRoute(reportId string) string { + return fmt.Sprintf("%s/%s/download", c.complianceReportsRoute(), reportId) +} + +func (c *Client4) outgoingWebhooksRoute() string { + return "/hooks/outgoing" +} + +func (c *Client4) outgoingWebhookRoute(hookID string) string { + return fmt.Sprintf(c.outgoingWebhooksRoute()+"/%v", hookID) +} + +func (c *Client4) preferencesRoute(userId string) string { + return c.userRoute(userId) + "/preferences" +} + +func (c *Client4) userStatusRoute(userId string) string { + return c.userRoute(userId) + "/status" +} + +func (c *Client4) userStatusesRoute() string { + return c.usersRoute() + "/status" +} + +func (c *Client4) samlRoute() string { + return "/saml" +} + +func (c *Client4) ldapRoute() string { + return "/ldap" +} + +func (c *Client4) brandRoute() string { + return "/brand" +} + +func (c *Client4) dataRetentionRoute() string { + return "/data_retention" +} + +func (c *Client4) dataRetentionPolicyRoute(policyID string) string { + return fmt.Sprintf(c.dataRetentionRoute()+"/policies/%v", policyID) +} + +func (c *Client4) elasticsearchRoute() string { + return "/elasticsearch" +} + +func (c *Client4) commandsRoute() string { + return "/commands" +} + +func (c 
*Client4) commandRoute(commandId string) string { + return fmt.Sprintf(c.commandsRoute()+"/%v", commandId) +} + +func (c *Client4) commandMoveRoute(commandId string) string { + return fmt.Sprintf(c.commandsRoute()+"/%v/move", commandId) +} + +func (c *Client4) draftsRoute() string { + return "/drafts" +} + +func (c *Client4) emojisRoute() string { + return "/emoji" +} + +func (c *Client4) emojiRoute(emojiId string) string { + return fmt.Sprintf(c.emojisRoute()+"/%v", emojiId) +} + +func (c *Client4) emojiByNameRoute(name string) string { + return fmt.Sprintf(c.emojisRoute()+"/name/%v", name) +} + +func (c *Client4) reactionsRoute() string { + return "/reactions" +} + +func (c *Client4) oAuthAppsRoute() string { + return "/oauth/apps" +} + +func (c *Client4) oAuthAppRoute(appId string) string { + return fmt.Sprintf("/oauth/apps/%v", appId) +} + +func (c *Client4) oAuthRegisterRoute() string { + return "/oauth/apps/register" +} + +func (c *Client4) outgoingOAuthConnectionsRoute() string { + return "/oauth/outgoing_connections" +} + +func (c *Client4) outgoingOAuthConnectionRoute(id string) string { + return fmt.Sprintf("%s/%s", c.outgoingOAuthConnectionsRoute(), id) +} + +func (c *Client4) jobsRoute() string { + return "/jobs" +} + +func (c *Client4) rolesRoute() string { + return "/roles" +} + +func (c *Client4) schemesRoute() string { + return "/schemes" +} + +func (c *Client4) schemeRoute(id string) string { + return c.schemesRoute() + fmt.Sprintf("/%v", id) +} + +func (c *Client4) analyticsRoute() string { + return "/analytics" +} + +func (c *Client4) timezonesRoute() string { + return c.systemRoute() + "/timezones" +} + +func (c *Client4) channelSchemeRoute(channelId string) string { + return fmt.Sprintf(c.channelsRoute()+"/%v/scheme", channelId) +} + +func (c *Client4) teamSchemeRoute(teamId string) string { + return fmt.Sprintf(c.teamsRoute()+"/%v/scheme", teamId) +} + +func (c *Client4) totalUsersStatsRoute() string { + return c.usersRoute() + "/stats" +} + 
+func (c *Client4) redirectLocationRoute() string { + return "/redirect_location" +} + +func (c *Client4) serverBusyRoute() string { + return "/server_busy" +} + +func (c *Client4) userTermsOfServiceRoute(userId string) string { + return c.userRoute(userId) + "/terms_of_service" +} + +func (c *Client4) termsOfServiceRoute() string { + return "/terms_of_service" +} + +func (c *Client4) groupsRoute() string { + return "/groups" +} + +func (c *Client4) publishUserTypingRoute(userId string) string { + return c.userRoute(userId) + "/typing" +} + +func (c *Client4) groupRoute(groupID string) string { + return fmt.Sprintf("%s/%s", c.groupsRoute(), groupID) +} + +func (c *Client4) groupSyncableRoute(groupID, syncableID string, syncableType GroupSyncableType) string { + return fmt.Sprintf("%s/%ss/%s", c.groupRoute(groupID), strings.ToLower(syncableType.String()), syncableID) +} + +func (c *Client4) groupSyncablesRoute(groupID string, syncableType GroupSyncableType) string { + return fmt.Sprintf("%s/%ss", c.groupRoute(groupID), strings.ToLower(syncableType.String())) +} + +func (c *Client4) importsRoute() string { + return "/imports" +} + +func (c *Client4) exportsRoute() string { + return "/exports" +} + +func (c *Client4) exportRoute(name string) string { + return fmt.Sprintf(c.exportsRoute()+"/%v", name) +} + +func (c *Client4) importRoute(name string) string { + return fmt.Sprintf(c.importsRoute()+"/%v", name) +} + +func (c *Client4) remoteClusterRoute() string { + return "/remotecluster" +} + +func (c *Client4) sharedChannelRemotesRoute(remoteId string) string { + return fmt.Sprintf("%s/%s/sharedchannelremotes", c.remoteClusterRoute(), remoteId) +} + +func (c *Client4) channelRemoteRoute(remoteId, channelId string) string { + return fmt.Sprintf("%s/%s/channels/%s", c.remoteClusterRoute(), remoteId, channelId) +} + +func (c *Client4) sharedChannelsRoute() string { + return "/sharedchannels" +} + +func (c *Client4) ipFiltersRoute() string { + return "/ip_filtering" +} + 
+func (c *Client4) permissionsRoute() string { + return "/permissions" +} + +func (c *Client4) limitsRoute() string { + return "/limits" +} + +func (c *Client4) customProfileAttributesRoute() string { + return "/custom_profile_attributes" +} + +func (c *Client4) bookmarksRoute(channelId string) string { + return c.channelRoute(channelId) + "/bookmarks" +} + +func (c *Client4) bookmarkRoute(channelId, bookmarkId string) string { + return fmt.Sprintf(c.bookmarksRoute(channelId)+"/%v", bookmarkId) +} + +func (c *Client4) clientPerfMetricsRoute() string { + return "/client_perf" +} + +func (c *Client4) userCustomProfileAttributesRoute(userID string) string { + return fmt.Sprintf("%s/custom_profile_attributes", c.userRoute(userID)) +} + +func (c *Client4) customProfileAttributeFieldsRoute() string { + return fmt.Sprintf("%s/fields", c.customProfileAttributesRoute()) +} + +func (c *Client4) customProfileAttributeFieldRoute(fieldID string) string { + return fmt.Sprintf("%s/%s", c.customProfileAttributeFieldsRoute(), fieldID) +} + +func (c *Client4) customProfileAttributeValuesRoute() string { + return fmt.Sprintf("%s/values", c.customProfileAttributesRoute()) +} + +func (c *Client4) accessControlPoliciesRoute() string { + return "/access_control_policies" +} + +func (c *Client4) celRoute() string { + return c.accessControlPoliciesRoute() + "/cel" +} + +func (c *Client4) accessControlPolicyRoute(policyID string) string { + return fmt.Sprintf(c.accessControlPoliciesRoute()+"/%v", url.PathEscape(policyID)) +} + +// Returns the HTTP response or any error that occurred during the request. +func (c *Client4) DoAPIGet(ctx context.Context, url string, etag string) (*http.Response, error) { + return c.doAPIRequest(ctx, http.MethodGet, c.APIURL+url, "", etag) +} + +// DoAPIPost makes a POST request to the specified URL with optional string data. +// Returns the HTTP response or any error that occurred during the request. 
+func (c *Client4) DoAPIPost(ctx context.Context, url, data string) (*http.Response, error) { + return c.doAPIRequest(ctx, http.MethodPost, c.APIURL+url, data, "") +} + +// DoAPIPostJSON marshals the provided data to JSON and makes a POST request to the specified URL. +// Returns the HTTP response or any error that occurred during marshaling or request. +func (c *Client4) DoAPIPostJSON(ctx context.Context, url string, data any) (*http.Response, error) { + buf, err := json.Marshal(data) + if err != nil { + return nil, err + } + return c.doAPIRequestBytes(ctx, http.MethodPost, c.APIURL+url, buf, "") +} + +// DoAPIPut makes a PUT request to the specified URL with optional string data. +// Returns the HTTP response or any error that occurred during the request. +func (c *Client4) DoAPIPut(ctx context.Context, url, data string) (*http.Response, error) { + return c.doAPIRequest(ctx, http.MethodPut, c.APIURL+url, data, "") +} + +// DoAPIPutJSON marshals the provided data to JSON and makes a PUT request to the specified URL. +// Returns the HTTP response or any error that occurred during marshaling or request. +func (c *Client4) DoAPIPutJSON(ctx context.Context, url string, data any) (*http.Response, error) { + buf, err := json.Marshal(data) + if err != nil { + return nil, err + } + return c.doAPIRequestBytes(ctx, http.MethodPut, c.APIURL+url, buf, "") +} + +// DoAPIPatchJSON marshals the provided data to JSON and makes a PATCH request to the specified URL. +// Returns the HTTP response or any error that occurred during marshaling or request. +func (c *Client4) DoAPIPatchJSON(ctx context.Context, url string, data any) (*http.Response, error) { + buf, err := json.Marshal(data) + if err != nil { + return nil, err + } + return c.doAPIRequestBytes(ctx, http.MethodPatch, c.APIURL+url, buf, "") +} + +// DoAPIDelete makes a DELETE request to the specified URL. +// Returns the HTTP response or any error that occurred during the request. 
+func (c *Client4) DoAPIDelete(ctx context.Context, url string) (*http.Response, error) { + return c.doAPIRequest(ctx, http.MethodDelete, c.APIURL+url, "", "") +} + +// DoAPIDeleteJSON marshals the provided data to JSON and makes a DELETE request to the specified URL. +// Returns the HTTP response or any error that occurred during marshaling or request. +func (c *Client4) DoAPIDeleteJSON(ctx context.Context, url string, data any) (*http.Response, error) { + buf, err := json.Marshal(data) + if err != nil { + return nil, err + } + return c.doAPIRequestBytes(ctx, http.MethodDelete, c.APIURL+url, buf, "") +} + +// DoAPIRequestWithHeaders makes an HTTP request with the specified method, URL, and custom headers. +// Returns the HTTP response or any error that occurred during the request. +func (c *Client4) DoAPIRequestWithHeaders(ctx context.Context, method, url, data string, headers map[string]string) (*http.Response, error) { + return c.doAPIRequestReader(ctx, method, url, "", strings.NewReader(data), headers) +} + +func (c *Client4) doAPIRequest(ctx context.Context, method, url, data, etag string) (*http.Response, error) { + return c.doAPIRequestReader(ctx, method, url, "", strings.NewReader(data), map[string]string{HeaderEtagClient: etag}) +} + +func (c *Client4) doAPIRequestBytes(ctx context.Context, method, url string, data []byte, etag string) (*http.Response, error) { + return c.doAPIRequestReader(ctx, method, url, "", bytes.NewReader(data), map[string]string{HeaderEtagClient: etag}) +} + +// doAPIRequestReader makes an HTTP request using an io.Reader for the request body and custom headers. +// This is the most flexible DoAPI method, supporting streaming data and custom headers. +// Returns the HTTP response or any error that occurred during the request. 
+func (c *Client4) doAPIRequestReader(ctx context.Context, method, url, contentType string, data io.Reader, headers map[string]string) (*http.Response, error) { + rq, err := http.NewRequestWithContext(ctx, method, url, data) + if err != nil { + return nil, err + } + + for k, v := range headers { + rq.Header.Set(k, v) + } + + if c.AuthToken != "" { + rq.Header.Set(HeaderAuth, c.AuthType+" "+c.AuthToken) + } + + if contentType != "" { + rq.Header.Set("Content-Type", contentType) + } + + if len(c.HTTPHeader) > 0 { + for k, v := range c.HTTPHeader { + rq.Header.Set(k, v) + } + } + + rp, err := c.HTTPClient.Do(rq) + if err != nil { + return rp, err + } + + if rp.StatusCode == 304 { + return rp, nil + } + + if rp.StatusCode >= 300 { + defer closeBody(rp) + return rp, AppErrorFromJSON(rp.Body) + } + + return rp, nil +} + +func (c *Client4) DoUploadFile(ctx context.Context, url string, data []byte, contentType string) (*FileUploadResponse, *Response, error) { + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+url, contentType, bytes.NewReader(data), nil) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*FileUploadResponse](r) +} + +// Authentication Section + +// LoginById authenticates a user by user id and password. +func (c *Client4) LoginById(ctx context.Context, id string, password string) (*User, *Response, error) { + m := make(map[string]string) + m["id"] = id + m["password"] = password + return c.login(ctx, m) +} + +// Login authenticates a user by login id, which can be username, email or some sort +// of SSO identifier based on server configuration, and a password. +func (c *Client4) Login(ctx context.Context, loginId string, password string) (*User, *Response, error) { + m := make(map[string]string) + m["login_id"] = loginId + m["password"] = password + return c.login(ctx, m) +} + +// LoginByLdap authenticates a user by LDAP id and password. 
+func (c *Client4) LoginByLdap(ctx context.Context, loginId string, password string) (*User, *Response, error) { + m := make(map[string]string) + m["login_id"] = loginId + m["password"] = password + m["ldap_only"] = c.boolString(true) + return c.login(ctx, m) +} + +// LoginWithDevice authenticates a user by login id (username, email or some sort +// of SSO identifier based on configuration), password and attaches a device id to +// the session. +func (c *Client4) LoginWithDevice(ctx context.Context, loginId string, password string, deviceId string) (*User, *Response, error) { + m := make(map[string]string) + m["login_id"] = loginId + m["password"] = password + m["device_id"] = deviceId + return c.login(ctx, m) +} + +// LoginWithMFA logs a user in with a MFA token +func (c *Client4) LoginWithMFA(ctx context.Context, loginId, password, mfaToken string) (*User, *Response, error) { + m := make(map[string]string) + m["login_id"] = loginId + m["password"] = password + m["token"] = mfaToken + return c.login(ctx, m) +} + +func (c *Client4) login(ctx context.Context, m map[string]string) (*User, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, "/users/login", m) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + c.AuthToken = r.Header.Get(HeaderToken) + c.AuthType = HeaderBearer + + return DecodeJSONFromResponse[*User](r) +} + +func (c *Client4) LoginWithDesktopToken(ctx context.Context, token, deviceId string) (*User, *Response, error) { + m := make(map[string]string) + m["token"] = token + m["deviceId"] = deviceId + r, err := c.DoAPIPostJSON(ctx, "/users/login/desktop_token", m) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + c.AuthToken = r.Header.Get(HeaderToken) + c.AuthType = HeaderBearer + + return DecodeJSONFromResponse[*User](r) +} + +// Logout terminates the current user's session. 
+func (c *Client4) Logout(ctx context.Context) (*Response, error) { + r, err := c.DoAPIPost(ctx, "/users/logout", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + c.AuthToken = "" + c.AuthType = HeaderBearer + return BuildResponse(r), nil +} + +// SwitchAccountType changes a user's login type from one type to another. +func (c *Client4) SwitchAccountType(ctx context.Context, switchRequest *SwitchRequest) (string, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/login/switch", switchRequest) + if err != nil { + return "", BuildResponse(r), err + } + defer closeBody(r) + result, resp, err := DecodeJSONFromResponse[map[string]string](r) + if err != nil { + return "", resp, err + } + return result["follow_link"], resp, nil +} + +// User Section + +// CreateUser creates a user in the system based on the provided user struct. +func (c *Client4) CreateUser(ctx context.Context, user *User) (*User, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.usersRoute(), user) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// CreateUserWithToken creates a user in the system based on the provided tokenId. +func (c *Client4) CreateUserWithToken(ctx context.Context, user *User, tokenId string) (*User, *Response, error) { + if tokenId == "" { + return nil, nil, errors.New("token ID is required") + } + + values := url.Values{} + values.Set("t", tokenId) + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"?"+values.Encode(), user) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// CreateUserWithInviteId creates a user in the system based on the provided invited id. 
+func (c *Client4) CreateUserWithInviteId(ctx context.Context, user *User, inviteId string) (*User, *Response, error) { + if inviteId == "" { + return nil, nil, errors.New("invite ID is required") + } + + values := url.Values{} + values.Set("iid", inviteId) + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"?"+values.Encode(), user) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// GetMe returns the logged in user. +func (c *Client4) GetMe(ctx context.Context, etag string) (*User, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(Me), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// GetUser returns a user based on the provided user id string. +func (c *Client4) GetUser(ctx context.Context, userId, etag string) (*User, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userId), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// GetUserByUsername returns a user based on the provided user name string. +func (c *Client4) GetUserByUsername(ctx context.Context, userName, etag string) (*User, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userByUsernameRoute(userName), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// GetUserByEmail returns a user based on the provided user email string. +func (c *Client4) GetUserByEmail(ctx context.Context, email, etag string) (*User, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userByEmailRoute(email), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// AutocompleteUsersInTeam returns the users on a team based on search term. 
+func (c *Client4) AutocompleteUsersInTeam(ctx context.Context, teamId string, username string, limit int, etag string) (*UserAutocomplete, *Response, error) { + values := url.Values{} + values.Set("in_team", teamId) + values.Set("name", username) + values.Set("limit", strconv.Itoa(limit)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"/autocomplete?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UserAutocomplete](r) +} + +// AutocompleteUsersInChannel returns the users in a channel based on search term. +func (c *Client4) AutocompleteUsersInChannel(ctx context.Context, teamId string, channelId string, username string, limit int, etag string) (*UserAutocomplete, *Response, error) { + values := url.Values{} + values.Set("in_team", teamId) + values.Set("in_channel", channelId) + values.Set("name", username) + values.Set("limit", strconv.Itoa(limit)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"/autocomplete?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UserAutocomplete](r) +} + +// AutocompleteUsers returns the users in the system based on search term. +func (c *Client4) AutocompleteUsers(ctx context.Context, username string, limit int, etag string) (*UserAutocomplete, *Response, error) { + values := url.Values{} + values.Set("name", username) + values.Set("limit", strconv.Itoa(limit)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"/autocomplete?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UserAutocomplete](r) +} + +// GetDefaultProfileImage gets the default user's profile image. Must be logged in. 
+func (c *Client4) GetDefaultProfileImage(ctx context.Context, userId string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/image/default", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// GetProfileImage gets user's profile image. Must be logged in. +func (c *Client4) GetProfileImage(ctx context.Context, userId, etag string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/image", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// GetUsers returns a page of users on the system. Page counting starts at 0. +func (c *Client4) GetUsers(ctx context.Context, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersWithCustomQueryParameters returns a page of users on the system. Page counting starts at 0. +func (c *Client4) GetUsersWithCustomQueryParameters(ctx context.Context, page int, perPage int, queryParameters, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode()+"&"+queryParameters, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersInTeam returns a page of users on a team. Page counting starts at 0. 
+func (c *Client4) GetUsersInTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("in_team", teamId) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetNewUsersInTeam returns a page of users on a team. Page counting starts at 0. +func (c *Client4) GetNewUsersInTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("sort", "create_at") + values.Set("in_team", teamId) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetRecentlyActiveUsersInTeam returns a page of users on a team. Page counting starts at 0. +func (c *Client4) GetRecentlyActiveUsersInTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("sort", "last_activity_at") + values.Set("in_team", teamId) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetActiveUsersInTeam returns a page of users on a team. Page counting starts at 0. 
+func (c *Client4) GetActiveUsersInTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("active", "true") + values.Set("in_team", teamId) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersNotInTeam returns a page of users who are not in a team. Page counting starts at 0. +func (c *Client4) GetUsersNotInTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("not_in_team", teamId) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersInChannel returns a page of users in a channel. Page counting starts at 0. +func (c *Client4) GetUsersInChannel(ctx context.Context, channelId string, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("in_channel", channelId) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersInChannelByStatus returns a page of users in a channel. Page counting starts at 0. 
Sorted by Status +func (c *Client4) GetUsersInChannelByStatus(ctx context.Context, channelId string, page int, perPage int, etag string) ([]*User, *Response, error) { + values := url.Values{} + values.Set("in_channel", channelId) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("sort", "status") + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersNotInChannel returns a page of users not in a channel. Page counting starts at 0. +func (c *Client4) GetUsersNotInChannel(ctx context.Context, teamId, channelId string, page int, perPage int, etag string) ([]*User, *Response, error) { + options := &GetUsersNotInChannelOptions{ + TeamID: teamId, + Page: page, + Limit: perPage, + Etag: etag, + CursorID: "", + } + return c.GetUsersNotInChannelWithOptions(ctx, channelId, options) +} + +// GetUsersNotInChannelWithOptionsStruct returns a page of users not in a channel using the options struct. +func (c *Client4) GetUsersNotInChannelWithOptions(ctx context.Context, channelId string, options *GetUsersNotInChannelOptions) ([]*User, *Response, error) { + values := url.Values{} + if options != nil { + values.Set("in_team", options.TeamID) + values.Set("not_in_channel", channelId) + values.Set("page", strconv.Itoa(options.Page)) + values.Set("per_page", strconv.Itoa(options.Limit)) + values.Set("cursor_id", options.CursorID) + } + r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), options.Etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersWithoutTeam returns a page of users on the system that aren't on any teams. Page counting starts at 0. 
func (c *Client4) GetUsersWithoutTeam(ctx context.Context, page int, perPage int, etag string) ([]*User, *Response, error) {
	values := url.Values{}
	values.Set("without_team", "1")
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*User](r)
}

// GetUsersInGroup returns a page of users in a group. Page counting starts at 0.
func (c *Client4) GetUsersInGroup(ctx context.Context, groupID string, page int, perPage int, etag string) ([]*User, *Response, error) {
	values := url.Values{}
	values.Set("in_group", groupID)
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*User](r)
}

// GetUsersInGroupByDisplayName returns a page of users in a group, sorted by
// display name. Page counting starts at 0.
func (c *Client4) GetUsersInGroupByDisplayName(ctx context.Context, groupID string, page int, perPage int, etag string) ([]*User, *Response, error) {
	values := url.Values{}
	values.Set("sort", "display_name")
	values.Set("in_group", groupID)
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.usersRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*User](r)
}

// GetUsersByIds returns a list of users based on the provided user ids.
+func (c *Client4) GetUsersByIds(ctx context.Context, userIds []string) ([]*User, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/ids", userIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersByIds returns a list of users based on the provided user ids. +func (c *Client4) GetUsersByIdsWithOptions(ctx context.Context, userIds []string, options *UserGetByIdsOptions) ([]*User, *Response, error) { + v := url.Values{} + if options.Since != 0 { + v.Set("since", fmt.Sprintf("%d", options.Since)) + } + + url := c.usersRoute() + "/ids" + if len(v) > 0 { + url += "?" + v.Encode() + } + + r, err := c.DoAPIPostJSON(ctx, url, userIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersByUsernames returns a list of users based on the provided usernames. +func (c *Client4) GetUsersByUsernames(ctx context.Context, usernames []string) ([]*User, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/usernames", usernames) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +// GetUsersByGroupChannelIds returns a map with channel ids as keys +// and a list of users as values based on the provided user ids. +func (c *Client4) GetUsersByGroupChannelIds(ctx context.Context, groupChannelIds []string) (map[string][]*User, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/group_channels", groupChannelIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string][]*User](r) +} + +// SearchUsers returns a list of users based on some search criteria. 
func (c *Client4) SearchUsers(ctx context.Context, search *UserSearch) ([]*User, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/search", search)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*User](r)
}

// UpdateUser updates a user in the system based on the provided user struct.
func (c *Client4) UpdateUser(ctx context.Context, user *User) (*User, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.userRoute(user.Id), user)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*User](r)
}

// PatchUser partially updates a user in the system. Any missing fields are not updated.
func (c *Client4) PatchUser(ctx context.Context, userId string, patch *UserPatch) (*User, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/patch", patch)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*User](r)
}

// UpdateUserAuth updates a user's auth data (authData, authService and password) in the system.
func (c *Client4) UpdateUserAuth(ctx context.Context, userId string, userAuth *UserAuth) (*UserAuth, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/auth", userAuth)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*UserAuth](r)
}

// UpdateUserMfa activates multi-factor authentication for a user if activate
// is true and a valid code is provided. If activate is false, then code is not
// required and multi-factor authentication is disabled for the user.
+func (c *Client4) UpdateUserMfa(ctx context.Context, userId, code string, activate bool) (*Response, error) { + requestBody := make(map[string]any) + requestBody["activate"] = activate + requestBody["code"] = code + + r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/mfa", requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GenerateMfaSecret will generate a new MFA secret for a user and return it as a string and +// as a base64 encoded image QR code. +func (c *Client4) GenerateMfaSecret(ctx context.Context, userId string) (*MfaSecret, *Response, error) { + r, err := c.DoAPIPost(ctx, c.userRoute(userId)+"/mfa/generate", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*MfaSecret](r) +} + +// UpdateUserPassword updates a user's password. Must be logged in as the user or be a system administrator. +func (c *Client4) UpdateUserPassword(ctx context.Context, userId, currentPassword, newPassword string) (*Response, error) { + requestBody := map[string]string{"current_password": currentPassword, "new_password": newPassword} + r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/password", requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// UpdateUserHashedPassword updates a user's password with an already-hashed password. Must be a system administrator. 
func (c *Client4) UpdateUserHashedPassword(ctx context.Context, userId, newHashedPassword string) (*Response, error) {
	// "already_hashed" tells the server the supplied password must be stored verbatim.
	requestBody := map[string]string{"already_hashed": "true", "new_password": newHashedPassword}
	r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/password", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// PromoteGuestToUser converts a guest into a regular user.
func (c *Client4) PromoteGuestToUser(ctx context.Context, guestId string) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.userRoute(guestId)+"/promote", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// DemoteUserToGuest converts a regular user into a guest.
func (c *Client4) DemoteUserToGuest(ctx context.Context, guestId string) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.userRoute(guestId)+"/demote", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// UpdateUserRoles updates a user's roles in the system. A user can have "system_user" and "system_admin" roles.
func (c *Client4) UpdateUserRoles(ctx context.Context, userId, roles string) (*Response, error) {
	requestBody := map[string]string{"roles": roles}
	r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/roles", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// UpdateUserActive updates status of a user whether active or not.
+func (c *Client4) UpdateUserActive(ctx context.Context, userId string, active bool) (*Response, error) { + requestBody := make(map[string]any) + requestBody["active"] = active + r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/active", requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +// ResetFailedAttempts resets the number of failed attempts for a user. +func (c *Client4) ResetFailedAttempts(ctx context.Context, userId string) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.userRoute(userId)+"/reset_failed_attempts", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// DeleteUser deactivates a user in the system based on the provided user id string. +func (c *Client4) DeleteUser(ctx context.Context, userId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.userRoute(userId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// PermanentDeleteUser deletes a user in the system based on the provided user id string. +func (c *Client4) PermanentDeleteUser(ctx context.Context, userId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.userRoute(userId)+"?permanent="+c.boolString(true)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// ConvertUserToBot converts a user to a bot user. +func (c *Client4) ConvertUserToBot(ctx context.Context, userId string) (*Bot, *Response, error) { + r, err := c.DoAPIPost(ctx, c.userRoute(userId)+"/convert_to_bot", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Bot](r) +} + +// ConvertBotToUser converts a bot user to a user. 
func (c *Client4) ConvertBotToUser(ctx context.Context, userId string, userPatch *UserPatch, setSystemAdmin bool) (*User, *Response, error) {
	var query string
	if setSystemAdmin {
		query = "?set_system_admin=true"
	}
	r, err := c.DoAPIPostJSON(ctx, c.botRoute(userId)+"/convert_to_user"+query, userPatch)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*User](r)
}

// PermanentDeleteAllUsers permanently deletes all users in the system. This is a local only endpoint.
func (c *Client4) PermanentDeleteAllUsers(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.usersRoute())
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// SendPasswordResetEmail will send a link for password resetting to a user with the
// provided email.
func (c *Client4) SendPasswordResetEmail(ctx context.Context, email string) (*Response, error) {
	requestBody := map[string]string{"email": email}
	r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/password/reset/send", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// ResetPassword uses a recovery code to reset a user's password.
func (c *Client4) ResetPassword(ctx context.Context, token, newPassword string) (*Response, error) {
	requestBody := map[string]string{"token": token, "new_password": newPassword}
	r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/password/reset", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetSessions returns a list of sessions based on the provided user id string.
func (c *Client4) GetSessions(ctx context.Context, userId, etag string) ([]*Session, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/sessions", etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*Session](r)
}

// RevokeSession revokes a user session based on the provided user id and session id strings.
func (c *Client4) RevokeSession(ctx context.Context, userId, sessionId string) (*Response, error) {
	requestBody := map[string]string{"session_id": sessionId}
	r, err := c.DoAPIPostJSON(ctx, c.userRoute(userId)+"/sessions/revoke", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// RevokeAllSessions revokes all sessions for the provided user id string.
func (c *Client4) RevokeAllSessions(ctx context.Context, userId string) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.userRoute(userId)+"/sessions/revoke/all", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// RevokeSessionsFromAllUsers revokes all sessions for all the users.
func (c *Client4) RevokeSessionsFromAllUsers(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.usersRoute()+"/sessions/revoke/all", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// AttachDeviceProps attaches a mobile device ID to the current session and other props.
func (c *Client4) AttachDeviceProps(ctx context.Context, newProps map[string]string) (*Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.usersRoute()+"/sessions/device", newProps)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetTeamsUnreadForUser will return an array with TeamUnread objects that contain the amount
// of unread messages and mentions the current user has for the teams it belongs to.
// An optional team ID can be set to exclude that team from the results.
// An optional boolean can be set to include collapsed thread unreads. Must be authenticated.
func (c *Client4) GetTeamsUnreadForUser(ctx context.Context, userId, teamIdToExclude string, includeCollapsedThreads bool) ([]*TeamUnread, *Response, error) {
	values := url.Values{}
	if teamIdToExclude != "" {
		values.Set("exclude_team", teamIdToExclude)
	}
	values.Set("include_collapsed_threads", c.boolString(includeCollapsedThreads))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/teams/unread?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*TeamUnread](r)
}

// GetUserAudits returns a page of audits based on the provided user id string. Page counting starts at 0.
func (c *Client4) GetUserAudits(ctx context.Context, userId string, page int, perPage int, etag string) (Audits, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/audits?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[Audits](r)
}

// VerifyUserEmail will verify a user's email using the supplied token.
+func (c *Client4) VerifyUserEmail(ctx context.Context, token string) (*Response, error) { + requestBody := map[string]string{"token": token} + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/email/verify", requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// VerifyUserEmailWithoutToken will verify a user's email by its Id. (Requires manage system role) +func (c *Client4) VerifyUserEmailWithoutToken(ctx context.Context, userId string) (*User, *Response, error) { + r, err := c.DoAPIPost(ctx, c.userRoute(userId)+"/email/verify/member", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*User](r) +} + +// SendVerificationEmail will send an email to the user with the provided email address, if +// that user exists. The email will contain a link that can be used to verify the user's +// email address. +func (c *Client4) SendVerificationEmail(ctx context.Context, email string) (*Response, error) { + requestBody := map[string]string{"email": email} + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/email/verify/send", requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// SetDefaultProfileImage resets the profile image to a default generated one. +func (c *Client4) SetDefaultProfileImage(ctx context.Context, userId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.userRoute(userId)+"/image") + if err != nil { + return BuildResponse(r), err + } + return BuildResponse(r), nil +} + +// SetProfileImage sets profile image of the user. 
+func (c *Client4) SetProfileImage(ctx context.Context, userId string, data []byte) (*Response, error) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("image", "profile.png") + if err != nil { + return nil, fmt.Errorf("failed to create form file: %w", err) + } + + if _, err = io.Copy(part, bytes.NewBuffer(data)); err != nil { + return nil, fmt.Errorf("failed to copy data to form file: %w", err) + } + + if err = writer.Close(); err != nil { + return nil, fmt.Errorf("failed to close multipart writer: %w", err) + } + + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+c.userRoute(userId)+"/image", writer.FormDataContentType(), body, nil) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +// CreateUserAccessToken will generate a user access token that can be used in place +// of a session token to access the REST API. Must have the 'create_user_access_token' +// permission and if generating for another user, must have the 'edit_other_users' +// permission. A non-blank description is required. +func (c *Client4) CreateUserAccessToken(ctx context.Context, userId, description string) (*UserAccessToken, *Response, error) { + requestBody := map[string]string{"description": description} + r, err := c.DoAPIPostJSON(ctx, c.userRoute(userId)+"/tokens", requestBody) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UserAccessToken](r) +} + +// GetUserAccessTokens will get a page of access tokens' id, description, is_active +// and the user_id in the system. The actual token will not be returned. Must have +// the 'manage_system' permission. 
func (c *Client4) GetUserAccessTokens(ctx context.Context, page int, perPage int) ([]*UserAccessToken, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userAccessTokensRoute()+"?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*UserAccessToken](r)
}

// GetUserAccessToken will get a user access tokens' id, description, is_active
// and the user_id of the user it is for. The actual token will not be returned.
// Must have the 'read_user_access_token' permission and if getting for another
// user, must have the 'edit_other_users' permission.
func (c *Client4) GetUserAccessToken(ctx context.Context, tokenId string) (*UserAccessToken, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.userAccessTokenRoute(tokenId), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*UserAccessToken](r)
}

// GetUserAccessTokensForUser will get a paged list of user access tokens showing id,
// description and user_id for each. The actual tokens will not be returned. Must have
// the 'read_user_access_token' permission and if getting for another user, must have the
// 'edit_other_users' permission.
func (c *Client4) GetUserAccessTokensForUser(ctx context.Context, userId string, page, perPage int) ([]*UserAccessToken, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/tokens?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*UserAccessToken](r)
}

// RevokeUserAccessToken will revoke a user access token by id. Must have the
// 'revoke_user_access_token' permission and if revoking for another user, must have the
// 'edit_other_users' permission.
func (c *Client4) RevokeUserAccessToken(ctx context.Context, tokenId string) (*Response, error) {
	requestBody := map[string]string{"token_id": tokenId}
	r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/tokens/revoke", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// SearchUserAccessTokens returns user access tokens matching the provided search term.
func (c *Client4) SearchUserAccessTokens(ctx context.Context, search *UserAccessTokenSearch) ([]*UserAccessToken, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/tokens/search", search)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*UserAccessToken](r)
}

// DisableUserAccessToken will disable a user access token by id. Must have the
// 'revoke_user_access_token' permission and if disabling for another user, must have the
// 'edit_other_users' permission.
func (c *Client4) DisableUserAccessToken(ctx context.Context, tokenId string) (*Response, error) {
	requestBody := map[string]string{"token_id": tokenId}
	r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/tokens/disable", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// EnableUserAccessToken will enable a user access token by id. Must have the
// 'create_user_access_token' permission and if enabling for another user, must have the
// 'edit_other_users' permission.
func (c *Client4) EnableUserAccessToken(ctx context.Context, tokenId string) (*Response, error) {
	requestBody := map[string]string{"token_id": tokenId}
	r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/tokens/enable", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetUsersForReporting returns user reports matching the provided filter,
// sort and paging options. Only non-zero options are sent as query parameters.
// options must be non-nil.
func (c *Client4) GetUsersForReporting(ctx context.Context, options *UserReportOptions) ([]*UserReport, *Response, error) {
	values := url.Values{}
	if options.Direction != "" {
		values.Set("direction", options.Direction)
	}
	if options.SortColumn != "" {
		values.Set("sort_column", options.SortColumn)
	}
	if options.PageSize > 0 {
		values.Set("page_size", strconv.Itoa(options.PageSize))
	}
	if options.Team != "" {
		values.Set("team_filter", options.Team)
	}
	if options.HideActive {
		values.Set("hide_active", "true")
	}
	if options.HideInactive {
		values.Set("hide_inactive", "true")
	}
	if options.SortDesc {
		values.Set("sort_direction", "desc")
	}
	if options.FromColumnValue != "" {
		values.Set("from_column_value", options.FromColumnValue)
	}
	if options.FromId != "" {
		values.Set("from_id", options.FromId)
	}
	if options.Role != "" {
		values.Set("role_filter", options.Role)
	}
	if options.HasNoTeam {
		values.Set("has_no_team", "true")
	}
	if options.DateRange != "" {
		values.Set("date_range", options.DateRange)
	}

	r, err := c.DoAPIGet(ctx, c.reportsRoute()+"/users?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*UserReport](r)
}

// Bots section

// CreateBot creates a bot in the system based on the provided bot struct.
func (c *Client4) CreateBot(ctx context.Context, bot *Bot) (*Bot, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.botsRoute(), bot)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Bot](r)
}

// PatchBot partially updates a bot. Any missing fields are not updated.
func (c *Client4) PatchBot(ctx context.Context, userId string, patch *BotPatch) (*Bot, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.botRoute(userId), patch)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Bot](r)
}

// GetBot fetches the given, undeleted bot.
func (c *Client4) GetBot(ctx context.Context, userId string, etag string) (*Bot, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.botRoute(userId), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Bot](r)
}

// GetBotIncludeDeleted fetches the given bot, even if it is deleted.
func (c *Client4) GetBotIncludeDeleted(ctx context.Context, userId string, etag string) (*Bot, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.botRoute(userId)+"?include_deleted="+c.boolString(true), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Bot](r)
}

// GetBots fetches the given page of bots, excluding deleted.
func (c *Client4) GetBots(ctx context.Context, page, perPage int, etag string) ([]*Bot, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.botsRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[BotList](r)
}

// GetBotsIncludeDeleted fetches the given page of bots, including deleted.
func (c *Client4) GetBotsIncludeDeleted(ctx context.Context, page, perPage int, etag string) ([]*Bot, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	values.Set("include_deleted", c.boolString(true))
	r, err := c.DoAPIGet(ctx, c.botsRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[BotList](r)
}

// GetBotsOrphaned fetches the given page of bots, only including orphaned bots.
func (c *Client4) GetBotsOrphaned(ctx context.Context, page, perPage int, etag string) ([]*Bot, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	values.Set("only_orphaned", c.boolString(true))
	r, err := c.DoAPIGet(ctx, c.botsRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[BotList](r)
}

// DisableBot disables the given bot in the system.
func (c *Client4) DisableBot(ctx context.Context, botUserId string) (*Bot, *Response, error) {
	r, err := c.DoAPIPost(ctx, c.botRoute(botUserId)+"/disable", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Bot](r)
}

// EnableBot enables the given bot in the system.
func (c *Client4) EnableBot(ctx context.Context, botUserId string) (*Bot, *Response, error) {
	r, err := c.DoAPIPost(ctx, c.botRoute(botUserId)+"/enable", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Bot](r)
}

// AssignBot assigns the given bot to the given user.
func (c *Client4) AssignBot(ctx context.Context, botUserId, newOwnerId string) (*Bot, *Response, error) {
	r, err := c.DoAPIPost(ctx, c.botRoute(botUserId)+"/assign/"+newOwnerId, "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Bot](r)
}

// Team Section

// CreateTeam creates a team in the system based on the provided team struct.
func (c *Client4) CreateTeam(ctx context.Context, team *Team) (*Team, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.teamsRoute(), team)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Team](r)
}

// GetTeam returns a team based on the provided team id string.
func (c *Client4) GetTeam(ctx context.Context, teamId, etag string) (*Team, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.teamRoute(teamId), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Team](r)
}

// GetTeamAsContentReviewer returns a team based on the provided team id string, fetching it as a Content Reviewer for a flagged post.
func (c *Client4) GetTeamAsContentReviewer(ctx context.Context, teamId, etag, flaggedPostId string) (*Team, *Response, error) {
	values := url.Values{}
	values.Set(AsContentReviewerParam, c.boolString(true))
	values.Set("flagged_post_id", flaggedPostId)

	route := c.teamRoute(teamId) + "?" + values.Encode()
	r, err := c.DoAPIGet(ctx, route, etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Team](r)
}

// GetAllTeams returns all teams based on permissions.
func (c *Client4) GetAllTeams(ctx context.Context, etag string, page int, perPage int) ([]*Team, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.teamsRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*Team](r)
}

// GetAllTeamsWithTotalCount returns all teams based on permissions, along with the total team count.
func (c *Client4) GetAllTeamsWithTotalCount(ctx context.Context, etag string, page int, perPage int) ([]*Team, int64, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	values.Set("include_total_count", c.boolString(true))
	r, err := c.DoAPIGet(ctx, c.teamsRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, 0, BuildResponse(r), err
	}
	defer closeBody(r)
	listWithCount, resp, err := DecodeJSONFromResponse[TeamsWithCount](r)
	if err != nil {
		return nil, 0, resp, err
	}
	return listWithCount.Teams, listWithCount.TotalCount, resp, nil
}

// GetAllTeamsExcludePolicyConstrained returns all teams which are not part of a data retention policy.
// Must be a system administrator.
func (c *Client4) GetAllTeamsExcludePolicyConstrained(ctx context.Context, etag string, page int, perPage int) ([]*Team, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	values.Set("exclude_policy_constrained", c.boolString(true))
	r, err := c.DoAPIGet(ctx, c.teamsRoute()+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*Team](r)
}

// GetTeamByName returns a team based on the provided team name string.
func (c *Client4) GetTeamByName(ctx context.Context, name, etag string) (*Team, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.teamByNameRoute(name), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Team](r)
}

// SearchTeams returns teams matching the provided search term.
func (c *Client4) SearchTeams(ctx context.Context, search *TeamSearch) ([]*Team, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.teamsRoute()+"/search", search)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*Team](r)
}

// SearchTeamsPaged returns a page of teams and the total count matching the provided search term.
// Page defaults to 0 and PerPage to 100 when unset.
func (c *Client4) SearchTeamsPaged(ctx context.Context, search *TeamSearch) ([]*Team, int64, *Response, error) {
	if search.Page == nil {
		search.Page = NewPointer(0)
	}
	if search.PerPage == nil {
		search.PerPage = NewPointer(100)
	}
	r, err := c.DoAPIPostJSON(ctx, c.teamsRoute()+"/search", search)
	if err != nil {
		return nil, 0, BuildResponse(r), err
	}
	defer closeBody(r)
	listWithCount, resp, err := DecodeJSONFromResponse[TeamsWithCount](r)
	if err != nil {
		return nil, 0, resp, err
	}
	return listWithCount.Teams, listWithCount.TotalCount, resp, nil
}

// TeamExists returns whether a team with the provided name exists.
+func (c *Client4) TeamExists(ctx context.Context, name, etag string) (bool, *Response, error) { + r, err := c.DoAPIGet(ctx, c.teamByNameRoute(name)+"/exists", etag) + if err != nil { + return false, BuildResponse(r), err + } + defer closeBody(r) + return MapBoolFromJSON(r.Body)["exists"], BuildResponse(r), nil +} + +// GetTeamsForUser returns a list of teams a user is on. Must be logged in as the user +// or be a system administrator. +func (c *Client4) GetTeamsForUser(ctx context.Context, userId, etag string) ([]*Team, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/teams", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Team](r) +} + +// GetTeamMember returns a team member based on the provided team and user id strings. +func (c *Client4) GetTeamMember(ctx context.Context, teamId, userId, etag string) (*TeamMember, *Response, error) { + r, err := c.DoAPIGet(ctx, c.teamMemberRoute(teamId, userId), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*TeamMember](r) +} + +// UpdateTeamMemberRoles will update the roles on a team for a user. +func (c *Client4) UpdateTeamMemberRoles(ctx context.Context, teamId, userId, newRoles string) (*Response, error) { + requestBody := map[string]string{"roles": newRoles} + r, err := c.DoAPIPutJSON(ctx, c.teamMemberRoute(teamId, userId)+"/roles", requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// UpdateTeamMemberSchemeRoles will update the scheme-derived roles on a team for a user. 
+func (c *Client4) UpdateTeamMemberSchemeRoles(ctx context.Context, teamId string, userId string, schemeRoles *SchemeRoles) (*Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.teamMemberRoute(teamId, userId)+"/schemeRoles", schemeRoles) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// UpdateTeam will update a team. +func (c *Client4) UpdateTeam(ctx context.Context, team *Team) (*Team, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.teamRoute(team.Id), team) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Team](r) +} + +// PatchTeam partially updates a team. Any missing fields are not updated. +func (c *Client4) PatchTeam(ctx context.Context, teamId string, patch *TeamPatch) (*Team, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.teamRoute(teamId)+"/patch", patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Team](r) +} + +// RestoreTeam restores a previously deleted team. +func (c *Client4) RestoreTeam(ctx context.Context, teamId string) (*Team, *Response, error) { + r, err := c.DoAPIPost(ctx, c.teamRoute(teamId)+"/restore", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Team](r) +} + +// RegenerateTeamInviteId requests a new invite ID to be generated. +func (c *Client4) RegenerateTeamInviteId(ctx context.Context, teamId string) (*Team, *Response, error) { + r, err := c.DoAPIPost(ctx, c.teamRoute(teamId)+"/regenerate_invite_id", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Team](r) +} + +// SoftDeleteTeam deletes the team softly (archive only, not permanent delete). 
+func (c *Client4) SoftDeleteTeam(ctx context.Context, teamId string) (*Response, error) {
+	r, err := c.DoAPIDelete(ctx, c.teamRoute(teamId))
+	if err != nil {
+		return BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return BuildResponse(r), nil
+}
+
+// PermanentDeleteTeam deletes the team, should only be used when needed for
+// compliance and the like.
+func (c *Client4) PermanentDeleteTeam(ctx context.Context, teamId string) (*Response, error) {
+	r, err := c.DoAPIDelete(ctx, c.teamRoute(teamId)+"?permanent="+c.boolString(true))
+	if err != nil {
+		return BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return BuildResponse(r), nil
+}
+
+// UpdateTeamPrivacy modifies the team type (model.TeamOpen <--> model.TeamInvite) and sets
+// the corresponding AllowOpenInvite appropriately.
+func (c *Client4) UpdateTeamPrivacy(ctx context.Context, teamId string, privacy string) (*Team, *Response, error) {
+	requestBody := map[string]string{"privacy": privacy}
+	r, err := c.DoAPIPutJSON(ctx, c.teamRoute(teamId)+"/privacy", requestBody)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[*Team](r)
+}
+
+// GetTeamMembers returns team members based on the provided team id string.
+func (c *Client4) GetTeamMembers(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*TeamMember, *Response, error) {
+	values := url.Values{}
+	values.Set("page", strconv.Itoa(page))
+	values.Set("per_page", strconv.Itoa(perPage))
+	r, err := c.DoAPIGet(ctx, c.teamMembersRoute(teamId)+"?"+values.Encode(), etag)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*TeamMember](r)
+}
+
+// GetTeamMembersSortAndWithoutDeletedUsers returns team members based on the provided team id string.
+// Additional parameters of sort and exclude_deleted_users are accepted as well.
+// They could not be added to GetTeamMembers above because that would be a breaking change.
+func (c *Client4) GetTeamMembersSortAndWithoutDeletedUsers(ctx context.Context, teamId string, page int, perPage int, sort string, excludeDeletedUsers bool, etag string) ([]*TeamMember, *Response, error) {
+	values := url.Values{}
+	values.Set("page", strconv.Itoa(page))
+	values.Set("per_page", strconv.Itoa(perPage))
+	values.Set("sort", sort)
+	values.Set("exclude_deleted_users", c.boolString(excludeDeletedUsers))
+	r, err := c.DoAPIGet(ctx, c.teamMembersRoute(teamId)+"?"+values.Encode(), etag)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*TeamMember](r)
+}
+
+// GetTeamMembersForUser returns the team members for a user.
+func (c *Client4) GetTeamMembersForUser(ctx context.Context, userId string, etag string) ([]*TeamMember, *Response, error) {
+	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/teams/members", etag)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*TeamMember](r)
+}
+
+// GetTeamMembersByIds will return an array of team members based on the
+// team id and a list of user ids provided. Must be authenticated.
+func (c *Client4) GetTeamMembersByIds(ctx context.Context, teamId string, userIds []string) ([]*TeamMember, *Response, error) {
+	// Build the route via the shared helper for consistency with the other
+	// team-member endpoints (yields the same "/teams/{id}/members/ids" URL).
+	r, err := c.DoAPIPostJSON(ctx, c.teamMembersRoute(teamId)+"/ids", userIds)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*TeamMember](r)
+}
+
+// AddTeamMember adds a user to a team and returns a team member.
+func (c *Client4) AddTeamMember(ctx context.Context, teamId, userId string) (*TeamMember, *Response, error) {
+	member := &TeamMember{TeamId: teamId, UserId: userId}
+	r, err := c.DoAPIPostJSON(ctx, c.teamMembersRoute(teamId), member)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[*TeamMember](r)
+}
+
+// AddTeamMemberFromInvite adds a user to a team and returns a team member using an invite id
+// or an invite token/data pair.
+func (c *Client4) AddTeamMemberFromInvite(ctx context.Context, token, inviteId string) (*TeamMember, *Response, error) {
+	values := url.Values{}
+	values.Set("invite_id", inviteId)
+	values.Set("token", token)
+	r, err := c.DoAPIPost(ctx, c.teamsRoute()+"/members/invite"+"?"+values.Encode(), "")
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[*TeamMember](r)
+}
+
+// AddTeamMembers adds a number of users to a team and returns the team members.
+func (c *Client4) AddTeamMembers(ctx context.Context, teamId string, userIds []string) ([]*TeamMember, *Response, error) {
+	var members []*TeamMember
+	for _, userId := range userIds {
+		member := &TeamMember{TeamId: teamId, UserId: userId}
+		members = append(members, member)
+	}
+	r, err := c.DoAPIPostJSON(ctx, c.teamMembersRoute(teamId)+"/batch", members)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*TeamMember](r)
+}
+
+// AddTeamMembersGracefully adds a number of users to a team, returning the team
+// members together with any per-user errors.
+func (c *Client4) AddTeamMembersGracefully(ctx context.Context, teamId string, userIds []string) ([]*TeamMemberWithError, *Response, error) { + var members []*TeamMember + for _, userId := range userIds { + member := &TeamMember{TeamId: teamId, UserId: userId} + members = append(members, member) + } + r, err := c.DoAPIPostJSON(ctx, c.teamMembersRoute(teamId)+"/batch?graceful="+c.boolString(true), members) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*TeamMemberWithError](r) +} + +// RemoveTeamMember will remove a user from a team. +func (c *Client4) RemoveTeamMember(ctx context.Context, teamId, userId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.teamMemberRoute(teamId, userId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetTeamStats returns a team stats based on the team id string. +// Must be authenticated. +func (c *Client4) GetTeamStats(ctx context.Context, teamId, etag string) (*TeamStats, *Response, error) { + r, err := c.DoAPIGet(ctx, c.teamStatsRoute(teamId), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*TeamStats](r) +} + +// GetTotalUsersStats returns a total system user stats. +// Must be authenticated. +func (c *Client4) GetTotalUsersStats(ctx context.Context, etag string) (*UsersStats, *Response, error) { + r, err := c.DoAPIGet(ctx, c.totalUsersStatsRoute(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UsersStats](r) +} + +// GetTeamUnread will return a TeamUnread object that contains the amount of +// unread messages and mentions the user has for the specified team. +// Must be authenticated. 
+func (c *Client4) GetTeamUnread(ctx context.Context, teamId, userId string) (*TeamUnread, *Response, error) {
+	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+c.teamRoute(teamId)+"/unread", "")
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[*TeamUnread](r)
+}
+
+// ImportTeam will import an exported team from another app into an existing team.
+func (c *Client4) ImportTeam(ctx context.Context, data []byte, filesize int, importFrom, filename, teamId string) (map[string]string, *Response, error) {
+	body := &bytes.Buffer{}
+	writer := multipart.NewWriter(body)
+
+	part, err := writer.CreateFormFile("file", filename)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	if _, err = io.Copy(part, bytes.NewBuffer(data)); err != nil {
+		return nil, nil, err
+	}
+
+	part, err = writer.CreateFormField("filesize")
+	if err != nil {
+		return nil, nil, err
+	}
+
+	if _, err = io.Copy(part, strings.NewReader(strconv.Itoa(filesize))); err != nil {
+		return nil, nil, err
+	}
+
+	part, err = writer.CreateFormField("importFrom")
+	if err != nil {
+		return nil, nil, err
+	}
+
+	if _, err = io.Copy(part, strings.NewReader(importFrom)); err != nil {
+		return nil, nil, err
+	}
+
+	if err = writer.Close(); err != nil {
+		return nil, nil, err
+	}
+
+	r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+c.teamImportRoute(teamId), writer.FormDataContentType(), body, nil)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[map[string]string](r)
+}
+
+// InviteUsersToTeam invites users by email to the team.
+func (c *Client4) InviteUsersToTeam(ctx context.Context, teamId string, userEmails []string) (*Response, error) {
+	r, err := c.DoAPIPostJSON(ctx, c.teamRoute(teamId)+"/invite/email", userEmails)
+	if err != nil {
+		return BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return BuildResponse(r), nil
+}
+
+// InviteGuestsToTeam invites guests by email to some channels in a team.
+func (c *Client4) InviteGuestsToTeam(ctx context.Context, teamId string, userEmails []string, channels []string, message string) (*Response, error) {
+	guestsInvite := GuestsInvite{
+		Emails:   userEmails,
+		Channels: channels,
+		Message:  message,
+	}
+	r, err := c.DoAPIPostJSON(ctx, c.teamRoute(teamId)+"/invite-guests/email", guestsInvite)
+	if err != nil {
+		return BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return BuildResponse(r), nil
+}
+
+// InviteUsersToTeamGracefully invites users by email to the team, returning any
+// per-email invite errors instead of failing the whole request.
+func (c *Client4) InviteUsersToTeamGracefully(ctx context.Context, teamId string, userEmails []string) ([]*EmailInviteWithError, *Response, error) {
+	r, err := c.DoAPIPostJSON(ctx, c.teamRoute(teamId)+"/invite/email?graceful="+c.boolString(true), userEmails)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*EmailInviteWithError](r)
+}
+
+// InviteUsersToTeamAndChannelsGracefully invites users by email to the team and
+// the given channels, returning any per-email invite errors instead of failing
+// the whole request.
+func (c *Client4) InviteUsersToTeamAndChannelsGracefully(ctx context.Context, teamId string, userEmails []string, channelIds []string, message string) ([]*EmailInviteWithError, *Response, error) {
+	memberInvite := MemberInvite{
+		Emails:     userEmails,
+		ChannelIds: channelIds,
+		Message:    message,
+	}
+	r, err := c.DoAPIPostJSON(ctx, c.teamRoute(teamId)+"/invite/email?graceful="+c.boolString(true), memberInvite)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*EmailInviteWithError](r)
+}
+
+// InviteGuestsToTeamGracefully invites guests by email to some channels in a team,
+// returning any per-email invite errors instead of failing the whole request.
+func (c *Client4) InviteGuestsToTeamGracefully(ctx context.Context, teamId string, userEmails []string, channels []string, message string) ([]*EmailInviteWithError, *Response, error) { + guestsInvite := GuestsInvite{ + Emails: userEmails, + Channels: channels, + Message: message, + } + r, err := c.DoAPIPostJSON(ctx, c.teamRoute(teamId)+"/invite-guests/email?graceful="+c.boolString(true), guestsInvite) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*EmailInviteWithError](r) +} + +// InvalidateEmailInvites will invalidate active email invitations that have not been accepted by the user. +func (c *Client4) InvalidateEmailInvites(ctx context.Context) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.teamsRoute()+"/invites/email") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetTeamInviteInfo returns a team object from an invite id containing sanitized information. +func (c *Client4) GetTeamInviteInfo(ctx context.Context, inviteId string) (*Team, *Response, error) { + r, err := c.DoAPIGet(ctx, c.teamsRoute()+"/invite/"+inviteId, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Team](r) +} + +// SetTeamIcon sets team icon of the team. 
+func (c *Client4) SetTeamIcon(ctx context.Context, teamId string, data []byte) (*Response, error) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("image", "teamIcon.png") + if err != nil { + return nil, fmt.Errorf("failed to create form file for team icon: %w", err) + } + + if _, err = io.Copy(part, bytes.NewBuffer(data)); err != nil { + return nil, fmt.Errorf("failed to copy data to team icon form file: %w", err) + } + + if err = writer.Close(); err != nil { + return nil, fmt.Errorf("failed to close multipart writer for team icon: %w", err) + } + + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+c.teamRoute(teamId)+"/image", writer.FormDataContentType(), body, nil) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetTeamIcon gets the team icon of the team. +func (c *Client4) GetTeamIcon(ctx context.Context, teamId, etag string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.teamRoute(teamId)+"/image", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// RemoveTeamIcon updates LastTeamIconUpdate to 0 which indicates team icon is removed. +func (c *Client4) RemoveTeamIcon(ctx context.Context, teamId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.teamRoute(teamId)+"/image") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// Channel Section + +// GetAllChannels get all the channels. Must be a system administrator. +func (c *Client4) GetAllChannels(ctx context.Context, page int, perPage int, etag string) (ChannelListWithTeamData, *Response, error) { + return c.getAllChannels(ctx, page, perPage, etag, ChannelSearchOpts{}) +} + +// GetAllChannelsIncludeDeleted get all the channels. Must be a system administrator. 
+func (c *Client4) GetAllChannelsIncludeDeleted(ctx context.Context, page int, perPage int, etag string) (ChannelListWithTeamData, *Response, error) { + return c.getAllChannels(ctx, page, perPage, etag, ChannelSearchOpts{IncludeDeleted: true}) +} + +// GetAllChannelsExcludePolicyConstrained gets all channels which are not part of a data retention policy. +// Must be a system administrator. +func (c *Client4) GetAllChannelsExcludePolicyConstrained(ctx context.Context, page, perPage int, etag string) (ChannelListWithTeamData, *Response, error) { + return c.getAllChannels(ctx, page, perPage, etag, ChannelSearchOpts{ExcludePolicyConstrained: true}) +} + +func (c *Client4) getAllChannels(ctx context.Context, page int, perPage int, etag string, opts ChannelSearchOpts) (ChannelListWithTeamData, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("include_deleted", c.boolString(opts.IncludeDeleted)) + values.Set("exclude_policy_constrained", c.boolString(opts.ExcludePolicyConstrained)) + r, err := c.DoAPIGet(ctx, c.channelsRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[ChannelListWithTeamData](r) +} + +// GetAllChannelsWithCount get all the channels including the total count. Must be a system administrator. 
+func (c *Client4) GetAllChannelsWithCount(ctx context.Context, page int, perPage int, etag string) (ChannelListWithTeamData, int64, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("include_total_count", c.boolString(true)) + r, err := c.DoAPIGet(ctx, c.channelsRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, 0, BuildResponse(r), err + } + defer closeBody(r) + + cwc, resp, err := DecodeJSONFromResponse[*ChannelsWithCount](r) + if err != nil { + return nil, 0, resp, err + } + return cwc.Channels, cwc.TotalCount, resp, nil +} + +// CreateChannel creates a channel based on the provided channel struct. +func (c *Client4) CreateChannel(ctx context.Context, channel *Channel) (*Channel, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.channelsRoute(), channel) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// UpdateChannel updates a channel based on the provided channel struct. +func (c *Client4) UpdateChannel(ctx context.Context, channel *Channel) (*Channel, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.channelRoute(channel.Id), channel) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// PatchChannel partially updates a channel. Any missing fields are not updated. 
+func (c *Client4) PatchChannel(ctx context.Context, channelId string, patch *ChannelPatch) (*Channel, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.channelRoute(channelId)+"/patch", patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// UpdateChannelPrivacy updates channel privacy +func (c *Client4) UpdateChannelPrivacy(ctx context.Context, channelId string, privacy ChannelType) (*Channel, *Response, error) { + requestBody := map[string]string{"privacy": string(privacy)} + r, err := c.DoAPIPutJSON(ctx, c.channelRoute(channelId)+"/privacy", requestBody) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// RestoreChannel restores a previously deleted channel. Any missing fields are not updated. +func (c *Client4) RestoreChannel(ctx context.Context, channelId string) (*Channel, *Response, error) { + r, err := c.DoAPIPost(ctx, c.channelRoute(channelId)+"/restore", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// CreateDirectChannel creates a direct message channel based on the two user +// ids provided. +func (c *Client4) CreateDirectChannel(ctx context.Context, userId1, userId2 string) (*Channel, *Response, error) { + requestBody := []string{userId1, userId2} + r, err := c.DoAPIPostJSON(ctx, c.channelsRoute()+"/direct", requestBody) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// CreateGroupChannel creates a group message channel based on userIds provided. 
+func (c *Client4) CreateGroupChannel(ctx context.Context, userIds []string) (*Channel, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.channelsRoute()+"/group", userIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// GetChannel returns a channel based on the provided channel id string. +func (c *Client4) GetChannel(ctx context.Context, channelId, etag string) (*Channel, *Response, error) { + r, err := c.DoAPIGet(ctx, c.channelRoute(channelId), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// GetChannelAsContentReviewer returns a channel based on the provided channel id string, fetching it as a Content Reviewer for a flagged post. +func (c *Client4) GetChannelAsContentReviewer(ctx context.Context, channelId, etag, flaggedPostId string) (*Channel, *Response, error) { + values := url.Values{} + values.Set(AsContentReviewerParam, c.boolString(true)) + values.Set("flagged_post_id", flaggedPostId) + + route := c.channelRoute(channelId) + "?" + values.Encode() + r, err := c.DoAPIGet(ctx, route, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Channel](r) +} + +// GetChannelStats returns statistics for a channel. +func (c *Client4) GetChannelStats(ctx context.Context, channelId string, etag string, excludeFilesCount bool) (*ChannelStats, *Response, error) { + values := url.Values{} + values.Set("exclude_files_count", c.boolString(excludeFilesCount)) + route := c.channelRoute(channelId) + "/stats?" 
+ values.Encode() + r, err := c.DoAPIGet(ctx, route, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelStats](r) +} + +// GetChannelsMemberCount get channel member count for a given array of channel ids +func (c *Client4) GetChannelsMemberCount(ctx context.Context, channelIDs []string) (map[string]int64, *Response, error) { + route := c.channelsRoute() + "/stats/member_count" + r, err := c.DoAPIPostJSON(ctx, route, channelIDs) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]int64](r) +} + +// GetChannelMembersTimezones gets a list of timezones for a channel. +func (c *Client4) GetChannelMembersTimezones(ctx context.Context, channelId string) ([]string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.channelRoute(channelId)+"/timezones", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +// GetPinnedPosts gets a list of pinned posts. +func (c *Client4) GetPinnedPosts(ctx context.Context, channelId string, etag string) (*PostList, *Response, error) { + r, err := c.DoAPIGet(ctx, c.channelRoute(channelId)+"/pinned", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PostList](r) +} + +// GetPrivateChannelsForTeam returns a list of private channels based on the provided team id string. 
+func (c *Client4) GetPrivateChannelsForTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*Channel, *Response, error) {
+	values := url.Values{}
+	values.Set("page", strconv.Itoa(page))
+	values.Set("per_page", strconv.Itoa(perPage))
+	r, err := c.DoAPIGet(ctx, c.channelsForTeamRoute(teamId)+"/private?"+values.Encode(), etag)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*Channel](r)
+}
+
+// GetPublicChannelsForTeam returns a list of public channels based on the provided team id string.
+func (c *Client4) GetPublicChannelsForTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*Channel, *Response, error) {
+	values := url.Values{}
+	values.Set("page", strconv.Itoa(page))
+	values.Set("per_page", strconv.Itoa(perPage))
+	r, err := c.DoAPIGet(ctx, c.channelsForTeamRoute(teamId)+"?"+values.Encode(), etag)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*Channel](r)
+}
+
+// GetDeletedChannelsForTeam returns a list of deleted channels based on the provided team id string.
+func (c *Client4) GetDeletedChannelsForTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*Channel, *Response, error) {
+	values := url.Values{}
+	values.Set("page", strconv.Itoa(page))
+	values.Set("per_page", strconv.Itoa(perPage))
+	r, err := c.DoAPIGet(ctx, c.channelsForTeamRoute(teamId)+"/deleted?"+values.Encode(), etag)
+	if err != nil {
+		return nil, BuildResponse(r), err
+	}
+	defer closeBody(r)
+	return DecodeJSONFromResponse[[]*Channel](r)
+}
+
+// GetPublicChannelsByIdsForTeam returns a list of public channels based on provided team id string.
+func (c *Client4) GetPublicChannelsByIdsForTeam(ctx context.Context, teamId string, channelIds []string) ([]*Channel, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.channelsForTeamRoute(teamId)+"/ids", channelIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Channel](r) +} + +// GetChannelsForTeamForUser returns a list channels of on a team for a user. +func (c *Client4) GetChannelsForTeamForUser(ctx context.Context, teamId, userId string, includeDeleted bool, etag string) ([]*Channel, *Response, error) { + values := url.Values{} + values.Set("include_deleted", c.boolString(includeDeleted)) + r, err := c.DoAPIGet(ctx, c.channelsForTeamForUserRoute(teamId, userId)+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Channel](r) +} + +// GetChannelsForTeamAndUserWithLastDeleteAt returns a list channels of a team for a user, additionally filtered with lastDeleteAt. This does not have any effect if includeDeleted is set to false. +func (c *Client4) GetChannelsForTeamAndUserWithLastDeleteAt(ctx context.Context, teamId, userId string, includeDeleted bool, lastDeleteAt int, etag string) ([]*Channel, *Response, error) { + values := url.Values{} + values.Set("include_deleted", c.boolString(includeDeleted)) + values.Set("last_delete_at", strconv.Itoa(lastDeleteAt)) + route := c.userRoute(userId) + c.teamRoute(teamId) + "/channels?" + values.Encode() + r, err := c.DoAPIGet(ctx, route, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Channel](r) +} + +// GetChannelsForUserWithLastDeleteAt returns a list channels for a user, additionally filtered with lastDeleteAt. 
+func (c *Client4) GetChannelsForUserWithLastDeleteAt(ctx context.Context, userID string, lastDeleteAt int) ([]*Channel, *Response, error) { + values := url.Values{} + values.Set("last_delete_at", strconv.Itoa(lastDeleteAt)) + route := c.userRoute(userID) + "/channels?" + values.Encode() + r, err := c.DoAPIGet(ctx, route, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Channel](r) +} + +// SearchChannels returns the channels on a team matching the provided search term. +func (c *Client4) SearchChannels(ctx context.Context, teamId string, search *ChannelSearch) ([]*Channel, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.channelsForTeamRoute(teamId)+"/search", search) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Channel](r) +} + +// SearchAllChannels search in all the channels. Must be a system administrator. +func (c *Client4) SearchAllChannels(ctx context.Context, search *ChannelSearch) (ChannelListWithTeamData, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.channelsRoute()+"/search", search) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[ChannelListWithTeamData](r) +} + +// SearchAllChannelsForUser search in all the channels for a regular user. +func (c *Client4) SearchAllChannelsForUser(ctx context.Context, term string) (ChannelListWithTeamData, *Response, error) { + search := &ChannelSearch{ + Term: term, + } + r, err := c.DoAPIPostJSON(ctx, c.channelsRoute()+"/search?system_console=false", search) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[ChannelListWithTeamData](r) +} + +// SearchAllChannelsPaged searches all the channels and returns the results paged with the total count. 
+func (c *Client4) SearchAllChannelsPaged(ctx context.Context, search *ChannelSearch) (*ChannelsWithCount, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.channelsRoute()+"/search", search) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelsWithCount](r) +} + +// SearchGroupChannels returns the group channels of the user whose members' usernames match the search term. +func (c *Client4) SearchGroupChannels(ctx context.Context, search *ChannelSearch) ([]*Channel, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.channelsRoute()+"/group/search", search) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Channel](r) +} + +// DeleteChannel deletes channel based on the provided channel id string. +func (c *Client4) DeleteChannel(ctx context.Context, channelId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.channelRoute(channelId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// PermanentDeleteChannel deletes a channel based on the provided channel id string. +func (c *Client4) PermanentDeleteChannel(ctx context.Context, channelId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.channelRoute(channelId)+"?permanent="+c.boolString(true)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// MoveChannel moves the channel to the destination team. 
func (c *Client4) MoveChannel(ctx context.Context, channelId, teamId string, force bool) (*Channel, *Response, error) {
	requestBody := map[string]any{
		"team_id": teamId,
		"force":   force,
	}
	r, err := c.DoAPIPostJSON(ctx, c.channelRoute(channelId)+"/move", requestBody)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Channel](r)
}

// GetChannelByName returns a channel based on the provided channel name and team id strings.
func (c *Client4) GetChannelByName(ctx context.Context, channelName, teamId string, etag string) (*Channel, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.channelByNameRoute(channelName, teamId), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Channel](r)
}

// GetChannelByNameIncludeDeleted returns a channel based on the provided channel name and team id strings. Other than GetChannelByName it will also return deleted channels.
func (c *Client4) GetChannelByNameIncludeDeleted(ctx context.Context, channelName, teamId string, etag string) (*Channel, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.channelByNameRoute(channelName, teamId)+"?include_deleted="+c.boolString(true), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Channel](r)
}

// GetChannelByNameForTeamName returns a channel based on the provided channel name and team name strings.
func (c *Client4) GetChannelByNameForTeamName(ctx context.Context, channelName, teamName string, etag string) (*Channel, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.channelByNameForTeamNameRoute(channelName, teamName), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Channel](r)
}

// GetChannelByNameForTeamNameIncludeDeleted returns a channel based on the provided channel name and team name strings.
// Other than GetChannelByNameForTeamName it will also return deleted channels.
func (c *Client4) GetChannelByNameForTeamNameIncludeDeleted(ctx context.Context, channelName, teamName string, etag string) (*Channel, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.channelByNameForTeamNameRoute(channelName, teamName)+"?include_deleted="+c.boolString(true), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Channel](r)
}

// GetChannelMembers gets a page of channel members specific to a channel.
func (c *Client4) GetChannelMembers(ctx context.Context, channelId string, page, perPage int, etag string) (ChannelMembers, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.channelMembersRoute(channelId)+"?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[ChannelMembers](r)
}

// GetChannelMembersWithTeamData gets a page of all channel members for a user.
func (c *Client4) GetChannelMembersWithTeamData(ctx context.Context, userID string, page, perPage int) (ChannelMembersWithTeamData, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userRoute(userID)+"/channel_members?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)

	var ch ChannelMembersWithTeamData

	// Check if we need to handle NDJSON format (when page is -1).
	// page == -1 is the "all members" request; the server may then stream the
	// result as newline-delimited JSON instead of a single array.
	if page == -1 {
		// Process NDJSON format (each JSON object on new line)
		contentType := r.Header.Get("Content-Type")
		if contentType == "application/x-ndjson" {
			// NOTE(review): bufio.Scanner's default token limit is 64KiB per
			// line; a very large member object would make Scan fail — confirm
			// server-side line sizes stay below that.
			scanner := bufio.NewScanner(r.Body)
			ch = ChannelMembersWithTeamData{}

			for scanner.Scan() {
				line := scanner.Text()
				if line == "" {
					continue
				}

				var member ChannelMemberWithTeamData
				if err = json.Unmarshal([]byte(line), &member); err != nil {
					return nil, BuildResponse(r), fmt.Errorf("failed to unmarshal channel member data: %w", err)
				}
				ch = append(ch, member)
			}

			if err = scanner.Err(); err != nil {
				return nil, BuildResponse(r), fmt.Errorf("scanner error while reading channel members: %w", err)
			}

			return ch, BuildResponse(r), nil
		}
	}

	// Standard JSON format
	return DecodeJSONFromResponse[ChannelMembersWithTeamData](r)
}

// GetChannelMembersByIds gets the channel members in a channel for a list of user ids.
func (c *Client4) GetChannelMembersByIds(ctx context.Context, channelId string, userIds []string) (ChannelMembers, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.channelMembersRoute(channelId)+"/ids", userIds)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[ChannelMembers](r)
}

// GetChannelMember gets a channel member.
+func (c *Client4) GetChannelMember(ctx context.Context, channelId, userId, etag string) (*ChannelMember, *Response, error) { + r, err := c.DoAPIGet(ctx, c.channelMemberRoute(channelId, userId), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelMember](r) +} + +// GetChannelMembersForUser gets all the channel members for a user on a team. +func (c *Client4) GetChannelMembersForUser(ctx context.Context, userId, teamId, etag string) (ChannelMembers, *Response, error) { + r, err := c.DoAPIGet(ctx, fmt.Sprintf(c.userRoute(userId)+"/teams/%v/channels/members", teamId), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[ChannelMembers](r) +} + +// ViewChannel performs a view action for a user. Synonymous with switching channels or marking channels as read by a user. +func (c *Client4) ViewChannel(ctx context.Context, userId string, view *ChannelView) (*ChannelViewResponse, *Response, error) { + url := fmt.Sprintf(c.channelsRoute()+"/members/%v/view", userId) + r, err := c.DoAPIPostJSON(ctx, url, view) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelViewResponse](r) +} + +// ReadMultipleChannels performs a view action on several channels at the same time for a user. +func (c *Client4) ReadMultipleChannels(ctx context.Context, userId string, channelIds []string) (*ChannelViewResponse, *Response, error) { + url := fmt.Sprintf(c.channelsRoute()+"/members/%v/mark_read", userId) + r, err := c.DoAPIPostJSON(ctx, url, channelIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelViewResponse](r) +} + +// GetChannelUnread will return a ChannelUnread object that contains the number of +// unread messages and mentions for a user. 
func (c *Client4) GetChannelUnread(ctx context.Context, channelId, userId string) (*ChannelUnread, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+c.channelRoute(channelId)+"/unread", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*ChannelUnread](r)
}

// UpdateChannelRoles will update the roles on a channel for a user.
func (c *Client4) UpdateChannelRoles(ctx context.Context, channelId, userId, roles string) (*Response, error) {
	requestBody := map[string]string{"roles": roles}
	r, err := c.DoAPIPutJSON(ctx, c.channelMemberRoute(channelId, userId)+"/roles", requestBody)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// UpdateChannelMemberSchemeRoles will update the scheme-derived roles on a channel for a user.
func (c *Client4) UpdateChannelMemberSchemeRoles(ctx context.Context, channelId string, userId string, schemeRoles *SchemeRoles) (*Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.channelMemberRoute(channelId, userId)+"/schemeRoles", schemeRoles)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// UpdateChannelNotifyProps will update the notification properties on a channel for a user.
func (c *Client4) UpdateChannelNotifyProps(ctx context.Context, channelId, userId string, props map[string]string) (*Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.channelMemberRoute(channelId, userId)+"/notify_props", props)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// AddChannelMember adds user to channel and return a channel member.
+func (c *Client4) AddChannelMember(ctx context.Context, channelId, userId string) (*ChannelMember, *Response, error) { + requestBody := map[string]string{"user_id": userId} + r, err := c.DoAPIPostJSON(ctx, c.channelMembersRoute(channelId)+"", requestBody) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelMember](r) +} + +// AddChannelMembers adds users to a channel and return an array of channel members. +func (c *Client4) AddChannelMembers(ctx context.Context, channelId, postRootId string, userIds []string) ([]*ChannelMember, *Response, error) { + requestBody := map[string]any{"user_ids": userIds, "post_root_id": postRootId} + r, err := c.DoAPIPostJSON(ctx, c.channelMembersRoute(channelId)+"", requestBody) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*ChannelMember](r) +} + +// AddChannelMemberWithRootId adds user to channel and return a channel member. Post add to channel message has the postRootId. +func (c *Client4) AddChannelMemberWithRootId(ctx context.Context, channelId, userId, postRootId string) (*ChannelMember, *Response, error) { + requestBody := map[string]string{"user_id": userId, "post_root_id": postRootId} + r, err := c.DoAPIPostJSON(ctx, c.channelMembersRoute(channelId)+"", requestBody) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelMember](r) +} + +// RemoveUserFromChannel will delete the channel member object for a user, effectively removing the user from a channel. 
func (c *Client4) RemoveUserFromChannel(ctx context.Context, channelId, userId string) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.channelMemberRoute(channelId, userId))
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// AutocompleteChannelsForTeam will return an ordered list of channels autocomplete suggestions.
func (c *Client4) AutocompleteChannelsForTeam(ctx context.Context, teamId, name string) (ChannelList, *Response, error) {
	values := url.Values{}
	values.Set("name", name)
	r, err := c.DoAPIGet(ctx, c.channelsForTeamRoute(teamId)+"/autocomplete?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[ChannelList](r)
}

// AutocompleteChannelsForTeamForSearch will return an ordered list of your channels autocomplete suggestions.
func (c *Client4) AutocompleteChannelsForTeamForSearch(ctx context.Context, teamId, name string) (ChannelList, *Response, error) {
	values := url.Values{}
	values.Set("name", name)
	r, err := c.DoAPIGet(ctx, c.channelsForTeamRoute(teamId)+"/search_autocomplete?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[ChannelList](r)
}

// Post Section

// CreatePost creates a post based on the provided post struct.
func (c *Client4) CreatePost(ctx context.Context, post *Post) (*Post, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.postsRoute(), post)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Post](r)
}

// CreatePostEphemeral creates an ephemeral post based on the provided post struct which is sent to the given user id.
func (c *Client4) CreatePostEphemeral(ctx context.Context, post *PostEphemeral) (*Post, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.postsEphemeralRoute(), post)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Post](r)
}

// UpdatePost updates a post based on the provided post struct.
func (c *Client4) UpdatePost(ctx context.Context, postId string, post *Post) (*Post, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.postRoute(postId), post)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Post](r)
}

// PatchPost partially updates a post. Any missing fields are not updated.
func (c *Client4) PatchPost(ctx context.Context, postId string, patch *PostPatch) (*Post, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.postRoute(postId)+"/patch", patch)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Post](r)
}

// SetPostUnread marks channel where post belongs as unread on the time of the provided post.
func (c *Client4) SetPostUnread(ctx context.Context, userId string, postId string, collapsedThreadsSupported bool) (*Response, error) {
	reqData := map[string]bool{"collapsed_threads_supported": collapsedThreadsSupported}
	r, err := c.DoAPIPostJSON(ctx, c.userRoute(userId)+c.postRoute(postId)+"/set_unread", reqData)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// SetPostReminder creates a post reminder for a given post at a specified time.
// The time needs to be in UTC epoch in seconds. It is always truncated to a
// 5 minute resolution minimum.
func (c *Client4) SetPostReminder(ctx context.Context, reminder *PostReminder) (*Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.userRoute(reminder.UserId)+c.postRoute(reminder.PostId)+"/reminder", reminder)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// PinPost pins a post based on provided post id string.
func (c *Client4) PinPost(ctx context.Context, postId string) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.postRoute(postId)+"/pin", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// UnpinPost unpins a post based on provided post id string.
func (c *Client4) UnpinPost(ctx context.Context, postId string) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.postRoute(postId)+"/unpin", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetPost gets a single post.
func (c *Client4) GetPost(ctx context.Context, postId string, etag string) (*Post, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.postRoute(postId), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Post](r)
}

// GetPostIncludeDeleted gets a single post, including deleted.
func (c *Client4) GetPostIncludeDeleted(ctx context.Context, postId string, etag string) (*Post, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.postRoute(postId)+"?include_deleted="+c.boolString(true), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Post](r)
}

// DeletePost deletes a post from the provided post id string.
func (c *Client4) DeletePost(ctx context.Context, postId string) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.postRoute(postId))
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// PermanentDeletePost permanently deletes a post and its files from the provided post id string.
func (c *Client4) PermanentDeletePost(ctx context.Context, postId string) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.postRoute(postId)+"?permanent="+c.boolString(true))
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetPostThread gets a post with all the other posts in the same thread.
func (c *Client4) GetPostThread(ctx context.Context, postId string, etag string, collapsedThreads bool) (*PostList, *Response, error) {
	values := url.Values{}
	values.Set("collapsedThreads", c.boolString(collapsedThreads))
	r, err := c.DoAPIGet(ctx, c.postRoute(postId)+"/thread?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// GetPostThreadWithOpts gets a post with all the other posts in the same thread.
func (c *Client4) GetPostThreadWithOpts(ctx context.Context, postID string, etag string, opts GetPostsOptions) (*PostList, *Response, error) {
	urlVal := c.postRoute(postID) + "/thread"

	// Only non-zero options are encoded into the query string, so the server
	// applies its defaults for everything left unset.
	values := url.Values{}
	if opts.CollapsedThreads {
		values.Set("collapsedThreads", "true")
	}
	if opts.CollapsedThreadsExtended {
		values.Set("collapsedThreadsExtended", "true")
	}
	if opts.SkipFetchThreads {
		values.Set("skipFetchThreads", "true")
	}
	if opts.UpdatesOnly {
		values.Set("updatesOnly", "true")
	}
	if opts.PerPage != 0 {
		values.Set("perPage", strconv.Itoa(opts.PerPage))
	}
	if opts.FromPost != "" {
		values.Set("fromPost", opts.FromPost)
	}
	if opts.FromCreateAt != 0 {
		values.Set("fromCreateAt", strconv.FormatInt(opts.FromCreateAt, 10))
	}
	if opts.FromUpdateAt != 0 {
		values.Set("fromUpdateAt", strconv.FormatInt(opts.FromUpdateAt, 10))
	}
	if opts.Direction != "" {
		values.Set("direction", opts.Direction)
	}
	urlVal += "?" + values.Encode()

	r, err := c.DoAPIGet(ctx, urlVal, etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// GetPostsForChannel gets a page of posts with an array for ordering for a channel.
+func (c *Client4) GetPostsForChannel(ctx context.Context, channelId string, page, perPage int, etag string, collapsedThreads bool, includeDeleted bool) (*PostList, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("collapsedThreads", c.boolString(collapsedThreads)) + values.Set("include_deleted", c.boolString(includeDeleted)) + r, err := c.DoAPIGet(ctx, c.channelRoute(channelId)+"/posts?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PostList](r) +} + +// GetPostsByIds gets a list of posts by taking an array of post ids +func (c *Client4) GetPostsByIds(ctx context.Context, postIds []string) ([]*Post, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.postsRoute()+"/ids", postIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Post](r) +} + +// GetEditHistoryForPost gets a list of posts by taking a post ids +func (c *Client4) GetEditHistoryForPost(ctx context.Context, postId string) ([]*Post, *Response, error) { + js, err := json.Marshal(postId) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal edit history request: %w", err) + } + r, err := c.DoAPIGet(ctx, c.postRoute(postId)+"/edit_history", string(js)) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Post](r) +} + +// GetFlaggedPostsForUser returns flagged posts of a user based on user id string. 
func (c *Client4) GetFlaggedPostsForUser(ctx context.Context, userId string, page int, perPage int) (*PostList, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/posts/flagged?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// GetFlaggedPostsForUserInTeam returns flagged posts in team of a user based on user id string.
func (c *Client4) GetFlaggedPostsForUserInTeam(ctx context.Context, userId string, teamId string, page int, perPage int) (*PostList, *Response, error) {
	// Validate locally before issuing the request; an invalid id would only
	// fail server-side otherwise.
	if !IsValidId(teamId) {
		return nil, nil, errors.New("teamId is invalid")
	}

	values := url.Values{}
	values.Set("team_id", teamId)
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/posts/flagged?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// GetFlaggedPostsForUserInChannel returns flagged posts in channel of a user based on user id string.
func (c *Client4) GetFlaggedPostsForUserInChannel(ctx context.Context, userId string, channelId string, page int, perPage int) (*PostList, *Response, error) {
	if !IsValidId(channelId) {
		return nil, nil, errors.New("channelId is invalid")
	}

	values := url.Values{}
	values.Set("channel_id", channelId)
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/posts/flagged?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// GetPostsSince gets posts created after a specified time as Unix time in milliseconds.
func (c *Client4) GetPostsSince(ctx context.Context, channelId string, time int64, collapsedThreads bool) (*PostList, *Response, error) {
	values := url.Values{}
	values.Set("since", strconv.FormatInt(time, 10))
	values.Set("collapsedThreads", c.boolString(collapsedThreads))
	r, err := c.DoAPIGet(ctx, c.channelRoute(channelId)+"/posts?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// GetPostsAfter gets a page of posts that were posted after the post provided.
func (c *Client4) GetPostsAfter(ctx context.Context, channelId, postId string, page, perPage int, etag string, collapsedThreads bool, includeDeleted bool) (*PostList, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	values.Set("after", postId)
	values.Set("collapsedThreads", c.boolString(collapsedThreads))
	values.Set("include_deleted", c.boolString(includeDeleted))
	r, err := c.DoAPIGet(ctx, c.channelRoute(channelId)+"/posts?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// GetPostsBefore gets a page of posts that were posted before the post provided.
func (c *Client4) GetPostsBefore(ctx context.Context, channelId, postId string, page, perPage int, etag string, collapsedThreads bool, includeDeleted bool) (*PostList, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	values.Set("before", postId)
	values.Set("collapsedThreads", c.boolString(collapsedThreads))
	values.Set("include_deleted", c.boolString(includeDeleted))
	r, err := c.DoAPIGet(ctx, c.channelRoute(channelId)+"/posts?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// MoveThread moves a thread based on provided post id, and channel id string.
func (c *Client4) MoveThread(ctx context.Context, postId string, params *MoveThreadParams) (*Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.postRoute(postId)+"/move", params)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetPostsAroundLastUnread gets a list of posts around last unread post by a user in a channel.
func (c *Client4) GetPostsAroundLastUnread(ctx context.Context, userId, channelId string, limitBefore, limitAfter int, collapsedThreads bool) (*PostList, *Response, error) {
	values := url.Values{}
	values.Set("limit_before", strconv.Itoa(limitBefore))
	values.Set("limit_after", strconv.Itoa(limitAfter))
	values.Set("collapsedThreads", c.boolString(collapsedThreads))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+c.channelRoute(channelId)+"/posts/unread?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// CreateScheduledPost creates a scheduled post from the provided struct.
func (c *Client4) CreateScheduledPost(ctx context.Context, scheduledPost *ScheduledPost) (*ScheduledPost, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.postsRoute()+"/schedule", scheduledPost)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*ScheduledPost](r)
}

// GetUserScheduledPosts returns the current user's scheduled posts for a team,
// keyed by channel id, optionally including direct channels.
func (c *Client4) GetUserScheduledPosts(ctx context.Context, teamId string, includeDirectChannels bool) (map[string][]*ScheduledPost, *Response, error) {
	values := url.Values{}
	values.Set("includeDirectChannels", fmt.Sprintf("%t", includeDirectChannels))
	r, err := c.DoAPIGet(ctx, c.postsRoute()+"/scheduled/team/"+teamId+"?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[map[string][]*ScheduledPost](r)
}

// UpdateScheduledPost updates an existing scheduled post.
func (c *Client4) UpdateScheduledPost(ctx context.Context, scheduledPost *ScheduledPost) (*ScheduledPost, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.postsRoute()+"/schedule/"+scheduledPost.Id, scheduledPost)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*ScheduledPost](r)
}

// DeleteScheduledPost deletes the scheduled post with the given id and returns the deleted entity.
func (c *Client4) DeleteScheduledPost(ctx context.Context, scheduledPostId string) (*ScheduledPost, *Response, error) {
	r, err := c.DoAPIDelete(ctx, c.postsRoute()+"/schedule/"+scheduledPostId)
	if err != nil {
		return nil, BuildResponse(r), err
	}

	defer closeBody(r)
	return DecodeJSONFromResponse[*ScheduledPost](r)
}

// FlagPostForContentReview flags the given post for content review.
func (c *Client4) FlagPostForContentReview(ctx context.Context, postId string, flagRequest *FlagContentRequest) (*Response, error) {
	r, err := c.DoAPIPostJSON(ctx, fmt.Sprintf("%s/post/%s/flag", c.contentFlaggingRoute(), postId), flagRequest)
	if err != nil {
		return BuildResponse(r), err
	}

	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetContentFlaggedPost fetches a post that has been flagged for content review.
func (c *Client4) GetContentFlaggedPost(ctx context.Context, postId string) (*Post, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.contentFlaggingRoute()+"/post/"+postId, "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)

	return DecodeJSONFromResponse[*Post](r)
}

// GetFlaggingConfiguration fetches the content-flagging reporting configuration.
func (c *Client4) GetFlaggingConfiguration(ctx context.Context) (*ContentFlaggingReportingConfig, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.contentFlaggingRoute()+"/flag/config", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*ContentFlaggingReportingConfig](r)
}

// GetTeamPostFlaggingFeatureStatus returns the content-flagging feature status for a team.
func (c *Client4) GetTeamPostFlaggingFeatureStatus(ctx context.Context, teamId string) (map[string]bool, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.contentFlaggingRoute()+"/team/"+teamId+"/status", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[map[string]bool](r)
}

// SaveContentFlaggingSettings saves the content-flagging settings.
func (c *Client4) SaveContentFlaggingSettings(ctx context.Context, config *ContentFlaggingSettingsRequest) (*Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.contentFlaggingRoute()+"/config", config)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetContentFlaggingSettings fetches the content-flagging settings.
func (c *Client4) GetContentFlaggingSettings(ctx context.Context) (*ContentFlaggingSettingsRequest, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.contentFlaggingRoute()+"/config", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*ContentFlaggingSettingsRequest](r)
}

// AssignContentFlaggingReviewer assigns a reviewer to a flagged post.
func (c *Client4) AssignContentFlaggingReviewer(ctx context.Context, postId, reviewerId string) (*Response, error) {
	r, err := c.DoAPIPost(ctx, fmt.Sprintf("%s/post/%s/assign/%s", c.contentFlaggingRoute(), postId, reviewerId), "")
	if err != nil {
		return BuildResponse(r), err
	}

	defer closeBody(r)
	return BuildResponse(r), nil
}

// SearchContentFlaggingReviewers searches for content-flagging reviewers on a team by term.
func (c *Client4) SearchContentFlaggingReviewers(ctx context.Context, teamID, term string) ([]*User, *Response, error) {
	values := url.Values{}
	values.Set("term", term)
	r, err := c.DoAPIGet(ctx, c.contentFlaggingRoute()+"/team/"+teamID+"/reviewers/search?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}

	defer closeBody(r)
	return DecodeJSONFromResponse[[]*User](r)
}

// SearchFiles returns any posts with matching terms string.
func (c *Client4) SearchFiles(ctx context.Context, teamId string, terms string, isOrSearch bool) (*FileInfoList, *Response, error) {
	params := SearchParameter{
		Terms:      &terms,
		IsOrSearch: &isOrSearch,
	}
	return c.SearchFilesWithParams(ctx, teamId, &params)
}

// SearchFilesWithParams returns any posts with matching terms string.
func (c *Client4) SearchFilesWithParams(ctx context.Context, teamId string, params *SearchParameter) (*FileInfoList, *Response, error) {
	// An empty teamId means "search across all teams" and uses the global files route.
	route := c.teamRoute(teamId) + "/files/search"
	if teamId == "" {
		route = c.filesRoute() + "/search"
	}
	r, err := c.DoAPIPostJSON(ctx, route, params)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*FileInfoList](r)
}

// SearchFilesAcrossTeams returns any posts with matching terms string.
func (c *Client4) SearchFilesAcrossTeams(ctx context.Context, terms string, isOrSearch bool) (*FileInfoList, *Response, error) {
	params := SearchParameter{
		Terms:      &terms,
		IsOrSearch: &isOrSearch,
	}
	// An empty teamId routes the search across all teams.
	return c.SearchFilesWithParams(ctx, "", &params)
}

// SearchPosts returns any posts with matching terms string.
func (c *Client4) SearchPosts(ctx context.Context, teamId string, terms string, isOrSearch bool) (*PostList, *Response, error) {
	params := SearchParameter{
		Terms:      &terms,
		IsOrSearch: &isOrSearch,
	}
	return c.SearchPostsWithParams(ctx, teamId, &params)
}

// SearchPostsWithParams returns any posts with matching terms string.
func (c *Client4) SearchPostsWithParams(ctx context.Context, teamId string, params *SearchParameter) (*PostList, *Response, error) {
	// An empty teamId searches across all teams via the global posts route.
	var route string
	if teamId == "" {
		route = c.postsRoute() + "/search"
	} else {
		route = c.teamRoute(teamId) + "/posts/search"
	}
	r, err := c.DoAPIPostJSON(ctx, route, params)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostList](r)
}

// SearchPostsWithMatches returns any posts with matching terms string, including
// match information for each post.
func (c *Client4) SearchPostsWithMatches(ctx context.Context, teamId string, terms string, isOrSearch bool) (*PostSearchResults, *Response, error) {
	requestBody := map[string]any{"terms": terms, "is_or_search": isOrSearch}
	var route string
	if teamId == "" {
		route = c.postsRoute() + "/search"
	} else {
		route = c.teamRoute(teamId) + "/posts/search"
	}
	r, err := c.DoAPIPostJSON(ctx, route, requestBody)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*PostSearchResults](r)
}

// DoPostAction performs a post action.
func (c *Client4) DoPostAction(ctx context.Context, postId, actionId string) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.postRoute(postId)+"/actions/"+actionId, "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// DoPostActionWithCookie performs a post action with extra arguments
func (c *Client4) DoPostActionWithCookie(ctx context.Context, postId, actionId, selected, cookieStr string) (*Response, error) {
	// With no extra arguments, fall back to a plain action POST with an empty body.
	if selected == "" && cookieStr == "" {
		r, err := c.DoAPIPost(ctx, c.postRoute(postId)+"/actions/"+actionId, "")
		if err != nil {
			return BuildResponse(r), err
		}
		defer closeBody(r)
		return BuildResponse(r), nil
	}

	req := DoPostActionRequest{
		SelectedOption: selected,
		Cookie:         cookieStr,
	}
	r, err := c.DoAPIPostJSON(ctx, c.postRoute(postId)+"/actions/"+actionId, req)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// OpenInteractiveDialog sends a WebSocket event to a user's clients to
// open interactive dialogs, based on the provided trigger ID and other
// provided data. Used with interactive message buttons, menus and
// slash commands.
func (c *Client4) OpenInteractiveDialog(ctx context.Context, request OpenDialogRequest) (*Response, error) {
	r, err := c.DoAPIPostJSON(ctx, "/actions/dialogs/open", request)
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// SubmitInteractiveDialog will submit the provided dialog data to the integration
// configured by the URL. Used with the interactive dialogs integration feature.
+func (c *Client4) SubmitInteractiveDialog(ctx context.Context, request SubmitDialogRequest) (*SubmitDialogResponse, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, "/actions/dialogs/submit", request) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*SubmitDialogResponse](r) +} + +// LookupInteractiveDialog will perform a lookup request for dynamic select elements +// in interactive dialogs. Used to fetch options for dynamic select fields. +func (c *Client4) LookupInteractiveDialog(ctx context.Context, request SubmitDialogRequest) (*LookupDialogResponse, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, "/actions/dialogs/lookup", request) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + return DecodeJSONFromResponse[*LookupDialogResponse](r) +} + +// UploadFile will upload a file to a channel using a multipart request, to be later attached to a post. +// This method is functionally equivalent to Client4.UploadFileAsRequestBody. +func (c *Client4) UploadFile(ctx context.Context, data []byte, channelId string, filename string) (*FileUploadResponse, *Response, error) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormField("channel_id") + if err != nil { + return nil, nil, err + } + + _, err = io.Copy(part, strings.NewReader(channelId)) + if err != nil { + return nil, nil, err + } + + part, err = writer.CreateFormFile("files", filename) + if err != nil { + return nil, nil, err + } + _, err = io.Copy(part, bytes.NewBuffer(data)) + if err != nil { + return nil, nil, err + } + + err = writer.Close() + if err != nil { + return nil, nil, err + } + + return c.DoUploadFile(ctx, c.filesRoute(), body.Bytes(), writer.FormDataContentType()) +} + +// UploadFileAsRequestBody will upload a file to a channel as the body of a request, to be later attached +// to a post. This method is functionally equivalent to Client4.UploadFile. 
+func (c *Client4) UploadFileAsRequestBody(ctx context.Context, data []byte, channelId string, filename string) (*FileUploadResponse, *Response, error) { + values := url.Values{} + values.Set("channel_id", channelId) + values.Set("filename", filename) + return c.DoUploadFile(ctx, c.filesRoute()+"?"+values.Encode(), data, http.DetectContentType(data)) +} + +// GetFile gets the bytes for a file by id. +func (c *Client4) GetFile(ctx context.Context, fileId string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +func (c *Client4) GetFileAsContentReviewer(ctx context.Context, fileId, flaggedPostId string) ([]byte, *Response, error) { + values := url.Values{} + values.Set(AsContentReviewerParam, c.boolString(true)) + values.Set("flagged_post_id", flaggedPostId) + + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// DownloadFile gets the bytes for a file by id, optionally adding headers to force the browser to download it. +func (c *Client4) DownloadFile(ctx context.Context, fileId string, download bool) ([]byte, *Response, error) { + values := url.Values{} + values.Set("download", c.boolString(download)) + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// GetFileThumbnail gets the bytes for a file by id. 
+func (c *Client4) GetFileThumbnail(ctx context.Context, fileId string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"/thumbnail", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// DownloadFileThumbnail gets the bytes for a file by id, optionally adding headers to force the browser to download it. +func (c *Client4) DownloadFileThumbnail(ctx context.Context, fileId string, download bool) ([]byte, *Response, error) { + values := url.Values{} + values.Set("download", c.boolString(download)) + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"/thumbnail?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// GetFileLink gets the public link of a file by id. +func (c *Client4) GetFileLink(ctx context.Context, fileId string) (string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"/link", "") + if err != nil { + return "", BuildResponse(r), err + } + defer closeBody(r) + result, resp, err := DecodeJSONFromResponse[map[string]string](r) + if err != nil { + return "", resp, err + } + return result["link"], resp, nil +} + +// GetFilePreview gets the bytes for a file by id. +func (c *Client4) GetFilePreview(ctx context.Context, fileId string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"/preview", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// DownloadFilePreview gets the bytes for a file by id. 
+func (c *Client4) DownloadFilePreview(ctx context.Context, fileId string, download bool) ([]byte, *Response, error) { + values := url.Values{} + values.Set("download", c.boolString(download)) + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"/preview?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// GetFileInfo gets all the file info objects. +func (c *Client4) GetFileInfo(ctx context.Context, fileId string) (*FileInfo, *Response, error) { + r, err := c.DoAPIGet(ctx, c.fileRoute(fileId)+"/info", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*FileInfo](r) +} + +// GetFileInfosForPost gets all the file info objects attached to a post. +func (c *Client4) GetFileInfosForPost(ctx context.Context, postId string, etag string) ([]*FileInfo, *Response, error) { + r, err := c.DoAPIGet(ctx, c.postRoute(postId)+"/files/info", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*FileInfo](r) +} + +// GetFileInfosForPost gets all the file info objects attached to a post, including deleted +func (c *Client4) GetFileInfosForPostIncludeDeleted(ctx context.Context, postId string, etag string) ([]*FileInfo, *Response, error) { + r, err := c.DoAPIGet(ctx, c.postRoute(postId)+"/files/info"+"?include_deleted="+c.boolString(true), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*FileInfo](r) +} + +// General/System Section + +// GenerateSupportPacket generates and downloads a Support Packet. +// It returns a ReadCloser to the packet and the filename. The caller needs to close the ReadCloser. 
+func (c *Client4) GenerateSupportPacket(ctx context.Context) (io.ReadCloser, string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.systemRoute()+"/support_packet", "") + if err != nil { + return nil, "", BuildResponse(r), err + } + + _, params, err := mime.ParseMediaType(r.Header.Get("Content-Disposition")) + if err != nil { + return nil, "", BuildResponse(r), fmt.Errorf("could not parse Content-Disposition header: %w", err) + } + + return r.Body, params["filename"], BuildResponse(r), nil +} + +// GetPing will return ok if the running goRoutines are below the threshold and unhealthy for above. +// DEPRECATED: Use GetPingWithOptions method instead. +func (c *Client4) GetPing(ctx context.Context) (string, *Response, error) { + ping, resp, err := c.GetPingWithOptions(ctx, SystemPingOptions{}) + status := "" + if ping != nil { + status = ping["status"].(string) + } + return status, resp, err +} + +// GetPingWithServerStatus will return ok if several basic server health checks +// all pass successfully. +// DEPRECATED: Use GetPingWithOptions method instead. +func (c *Client4) GetPingWithServerStatus(ctx context.Context) (string, *Response, error) { + ping, resp, err := c.GetPingWithOptions(ctx, SystemPingOptions{FullStatus: true}) + status := "" + if ping != nil { + status = ping["status"].(string) + } + return status, resp, err +} + +// GetPingWithFullServerStatus will return the full status if several basic server +// health checks all pass successfully. +// DEPRECATED: Use GetPingWithOptions method instead. 
+func (c *Client4) GetPingWithFullServerStatus(ctx context.Context) (map[string]any, *Response, error) { + return c.GetPingWithOptions(ctx, SystemPingOptions{FullStatus: true}) +} + +// GetPingWithOptions will return the status according to the options +func (c *Client4) GetPingWithOptions(ctx context.Context, options SystemPingOptions) (map[string]any, *Response, error) { + pingURL, err := url.Parse(c.systemRoute() + "/ping") + if err != nil { + return nil, nil, fmt.Errorf("could not parse query: %w", err) + } + values := pingURL.Query() + values.Set("get_server_status", c.boolString(options.FullStatus)) + values.Set("use_rest_semantics", c.boolString(options.RESTSemantics)) + pingURL.RawQuery = values.Encode() + r, err := c.DoAPIGet(ctx, pingURL.String(), "") + if r != nil && r.StatusCode == 500 { + defer r.Body.Close() + return map[string]any{"status": StatusUnhealthy}, BuildResponse(r), err + } + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]any](r) +} + +func (c *Client4) GetServerLimits(ctx context.Context) (*ServerLimits, *Response, error) { + r, err := c.DoAPIGet(ctx, c.limitsRoute()+"/server", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ServerLimits](r) +} + +// TestEmail will attempt to connect to the configured SMTP server. +func (c *Client4) TestEmail(ctx context.Context, config *Config) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.testEmailRoute(), config) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) TestNotifications(ctx context.Context) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.testNotificationRoute(), "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// TestSiteURL will test the validity of a site URL. 
+func (c *Client4) TestSiteURL(ctx context.Context, siteURL string) (*Response, error) { + requestBody := make(map[string]string) + requestBody["site_url"] = siteURL + r, err := c.DoAPIPostJSON(ctx, c.testSiteURLRoute(), requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// TestS3Connection will attempt to connect to the AWS S3. +func (c *Client4) TestS3Connection(ctx context.Context, config *Config) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.testS3Route(), config) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetConfig will retrieve the server config with some sanitized items. +func (c *Client4) GetConfig(ctx context.Context) (*Config, *Response, error) { + r, err := c.DoAPIGet(ctx, c.configRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Config](r) +} + +// GetConfig will retrieve the server config with some sanitized items. +func (c *Client4) GetConfigWithOptions(ctx context.Context, options GetConfigOptions) (map[string]any, *Response, error) { + v := url.Values{} + if options.RemoveDefaults { + v.Set("remove_defaults", "true") + } + if options.RemoveMasked { + v.Set("remove_masked", "true") + } + url := c.configRoute() + if len(v) > 0 { + url += "?" + v.Encode() + } + + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]any](r) +} + +// ReloadConfig will reload the server configuration. +func (c *Client4) ReloadConfig(ctx context.Context) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.configRoute()+"/reload", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetClientConfig will retrieve the parts of the server configuration needed by the client. 
+func (c *Client4) GetClientConfig(ctx context.Context, etag string) (map[string]string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.configRoute()+"/client", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]string](r) +} + +// GetEnvironmentConfig will retrieve a map mirroring the server configuration where fields +// are set to true if the corresponding config setting is set through an environment variable. +// Settings that haven't been set through environment variables will be missing from the map. +func (c *Client4) GetEnvironmentConfig(ctx context.Context) (map[string]any, *Response, error) { + r, err := c.DoAPIGet(ctx, c.configRoute()+"/environment", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return StringInterfaceFromJSON(r.Body), BuildResponse(r), nil +} + +// GetOldClientLicense will retrieve the parts of the server license needed by the +// client, formatted in the old format. +func (c *Client4) GetOldClientLicense(ctx context.Context, etag string) (map[string]string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.licenseRoute()+"/client?format=old", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]string](r) +} + +// DatabaseRecycle will recycle the connections. Discard current connection and get new one. +func (c *Client4) DatabaseRecycle(ctx context.Context) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.databaseRoute()+"/recycle", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// InvalidateCaches will purge the cache and can affect the performance while is cleaning. 
+func (c *Client4) InvalidateCaches(ctx context.Context) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.cacheRoute()+"/invalidate", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// UpdateConfig will update the server configuration. +func (c *Client4) UpdateConfig(ctx context.Context, config *Config) (*Config, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.configRoute(), config) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Config](r) +} + +// MigrateConfig will migrate existing config to the new one. +// DEPRECATED: The config migrate API has been moved to be a purely +// mmctl --local endpoint. This method will be removed in a +// future major release. +func (c *Client4) MigrateConfig(ctx context.Context, from, to string) (*Response, error) { + m := make(map[string]string, 2) + m["from"] = from + m["to"] = to + r, err := c.DoAPIPostJSON(ctx, c.configRoute()+"/migrate", m) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// UploadLicenseFile will add a license file to the system. 
+func (c *Client4) UploadLicenseFile(ctx context.Context, data []byte) (*Response, error) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("license", "test-license.mattermost-license") + if err != nil { + return nil, fmt.Errorf("failed to create form file for license upload: %w", err) + } + + if _, err = io.Copy(part, bytes.NewBuffer(data)); err != nil { + return nil, fmt.Errorf("failed to copy license data to form file: %w", err) + } + + if err = writer.Close(); err != nil { + return nil, fmt.Errorf("failed to close multipart writer for license upload: %w", err) + } + + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+c.licenseRoute(), writer.FormDataContentType(), body, nil) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// RemoveLicenseFile will remove the server license it exists. Note that this will +// disable all enterprise features. +func (c *Client4) RemoveLicenseFile(ctx context.Context) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.licenseRoute()) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetLicenseLoadMetric retrieves the license load metric from the server. +// The load is calculated as (monthly active users / licensed users) * 1000. +func (c *Client4) GetLicenseLoadMetric(ctx context.Context) (map[string]int, *Response, error) { + r, err := c.DoAPIGet(ctx, c.licenseRoute()+"/load_metric", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]int](r) +} + +// GetAnalyticsOld will retrieve analytics using the old format. New format is not +// available but the "/analytics" endpoint is reserved for it. The "name" argument is optional +// and defaults to "standard". The "teamId" argument is optional and will limit results +// to a specific team. 
+func (c *Client4) GetAnalyticsOld(ctx context.Context, name, teamId string) (AnalyticsRows, *Response, error) { + values := url.Values{} + values.Set("name", name) + values.Set("team_id", teamId) + r, err := c.DoAPIGet(ctx, c.analyticsRoute()+"/old?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[AnalyticsRows](r) +} + +// Webhooks Section + +// CreateIncomingWebhook creates an incoming webhook for a channel. +func (c *Client4) CreateIncomingWebhook(ctx context.Context, hook *IncomingWebhook) (*IncomingWebhook, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.incomingWebhooksRoute(), hook) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*IncomingWebhook](r) +} + +// UpdateIncomingWebhook updates an incoming webhook for a channel. +func (c *Client4) UpdateIncomingWebhook(ctx context.Context, hook *IncomingWebhook) (*IncomingWebhook, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.incomingWebhookRoute(hook.Id), hook) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*IncomingWebhook](r) +} + +// GetIncomingWebhooks returns a page of incoming webhooks on the system. Page counting starts at 0. +func (c *Client4) GetIncomingWebhooks(ctx context.Context, page int, perPage int, etag string) ([]*IncomingWebhook, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.incomingWebhooksRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*IncomingWebhook](r) +} + +// GetIncomingWebhooksWithCount returns a page of incoming webhooks on the system including the total count. Page counting starts at 0. 
+func (c *Client4) GetIncomingWebhooksWithCount(ctx context.Context, page int, perPage int, etag string) (*IncomingWebhooksWithCount, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("include_total_count", c.boolString(true)) + r, err := c.DoAPIGet(ctx, c.incomingWebhooksRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*IncomingWebhooksWithCount](r) +} + +// GetIncomingWebhooksForTeam returns a page of incoming webhooks for a team. Page counting starts at 0. +func (c *Client4) GetIncomingWebhooksForTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*IncomingWebhook, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("team_id", teamId) + r, err := c.DoAPIGet(ctx, c.incomingWebhooksRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*IncomingWebhook](r) +} + +// GetIncomingWebhook returns an Incoming webhook given the hook ID. +func (c *Client4) GetIncomingWebhook(ctx context.Context, hookID string, etag string) (*IncomingWebhook, *Response, error) { + r, err := c.DoAPIGet(ctx, c.incomingWebhookRoute(hookID), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*IncomingWebhook](r) +} + +// DeleteIncomingWebhook deletes and Incoming Webhook given the hook ID. 
+func (c *Client4) DeleteIncomingWebhook(ctx context.Context, hookID string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.incomingWebhookRoute(hookID)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// CreateOutgoingWebhook creates an outgoing webhook for a team or channel. +func (c *Client4) CreateOutgoingWebhook(ctx context.Context, hook *OutgoingWebhook) (*OutgoingWebhook, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.outgoingWebhooksRoute(), hook) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OutgoingWebhook](r) +} + +// UpdateOutgoingWebhook creates an outgoing webhook for a team or channel. +func (c *Client4) UpdateOutgoingWebhook(ctx context.Context, hook *OutgoingWebhook) (*OutgoingWebhook, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.outgoingWebhookRoute(hook.Id), hook) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OutgoingWebhook](r) +} + +// GetOutgoingWebhooks returns a page of outgoing webhooks on the system. Page counting starts at 0. +func (c *Client4) GetOutgoingWebhooks(ctx context.Context, page int, perPage int, etag string) ([]*OutgoingWebhook, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.outgoingWebhooksRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*OutgoingWebhook](r) +} + +// GetOutgoingWebhook outgoing webhooks on the system requested by Hook Id. 
+func (c *Client4) GetOutgoingWebhook(ctx context.Context, hookId string) (*OutgoingWebhook, *Response, error) { + r, err := c.DoAPIGet(ctx, c.outgoingWebhookRoute(hookId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OutgoingWebhook](r) +} + +// GetOutgoingWebhooksForChannel returns a page of outgoing webhooks for a channel. Page counting starts at 0. +func (c *Client4) GetOutgoingWebhooksForChannel(ctx context.Context, channelId string, page int, perPage int, etag string) ([]*OutgoingWebhook, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("channel_id", channelId) + r, err := c.DoAPIGet(ctx, c.outgoingWebhooksRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*OutgoingWebhook](r) +} + +// GetOutgoingWebhooksForTeam returns a page of outgoing webhooks for a team. Page counting starts at 0. +func (c *Client4) GetOutgoingWebhooksForTeam(ctx context.Context, teamId string, page int, perPage int, etag string) ([]*OutgoingWebhook, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("team_id", teamId) + r, err := c.DoAPIGet(ctx, c.outgoingWebhooksRoute()+"?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*OutgoingWebhook](r) +} + +// RegenOutgoingHookToken regenerate the outgoing webhook token. 
+func (c *Client4) RegenOutgoingHookToken(ctx context.Context, hookId string) (*OutgoingWebhook, *Response, error) { + r, err := c.DoAPIPost(ctx, c.outgoingWebhookRoute(hookId)+"/regen_token", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OutgoingWebhook](r) +} + +// DeleteOutgoingWebhook delete the outgoing webhook on the system requested by Hook Id. +func (c *Client4) DeleteOutgoingWebhook(ctx context.Context, hookId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.outgoingWebhookRoute(hookId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// Preferences Section + +// GetPreferences returns the user's preferences. +func (c *Client4) GetPreferences(ctx context.Context, userId string) (Preferences, *Response, error) { + r, err := c.DoAPIGet(ctx, c.preferencesRoute(userId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[Preferences](r) +} + +// UpdatePreferences saves the user's preferences. +func (c *Client4) UpdatePreferences(ctx context.Context, userId string, preferences Preferences) (*Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.preferencesRoute(userId), preferences) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// DeletePreferences deletes the user's preferences. +func (c *Client4) DeletePreferences(ctx context.Context, userId string, preferences Preferences) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.preferencesRoute(userId)+"/delete", preferences) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetPreferencesByCategory returns the user's preferences from the provided category string. 
+func (c *Client4) GetPreferencesByCategory(ctx context.Context, userId string, category string) (Preferences, *Response, error) { + url := fmt.Sprintf(c.preferencesRoute(userId)+"/%s", category) + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[Preferences](r) +} + +// GetPreferenceByCategoryAndName returns the user's preferences from the provided category and preference name string. +func (c *Client4) GetPreferenceByCategoryAndName(ctx context.Context, userId string, category string, preferenceName string) (*Preference, *Response, error) { + url := fmt.Sprintf(c.preferencesRoute(userId)+"/%s/name/%v", category, preferenceName) + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Preference](r) +} + +// SAML Section + +// GetSamlMetadata returns metadata for the SAML configuration. +func (c *Client4) GetSamlMetadata(ctx context.Context) (string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.samlRoute()+"/metadata", "") + if err != nil { + return "", BuildResponse(r), err + } + defer closeBody(r) + + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(r.Body) + if err != nil { + return "", BuildResponse(r), err + } + + return buf.String(), BuildResponse(r), nil +} + +func fileToMultipart(data []byte, filename string) ([]byte, *multipart.Writer, error) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("certificate", filename) + if err != nil { + return nil, nil, err + } + + if _, err = io.Copy(part, bytes.NewBuffer(data)); err != nil { + return nil, nil, err + } + + if err = writer.Close(); err != nil { + return nil, nil, err + } + + return body.Bytes(), writer, nil +} + +// UploadSamlIdpCertificate will upload an IDP certificate for SAML and set the config to use it. 
+// The filename parameter is deprecated and ignored: the server will pick a hard-coded filename when writing to disk. +func (c *Client4) UploadSamlIdpCertificate(ctx context.Context, data []byte, filename string) (*Response, error) { + body, writer, err := fileToMultipart(data, filename) + if err != nil { + return nil, fmt.Errorf("failed to prepare SAML IDP certificate for upload: %w", err) + } + + _, resp, err := c.DoUploadFile(ctx, c.samlRoute()+"/certificate/idp", body, writer.FormDataContentType()) + return resp, err +} + +// UploadSamlPublicCertificate will upload a public certificate for SAML and set the config to use it. +// The filename parameter is deprecated and ignored: the server will pick a hard-coded filename when writing to disk. +func (c *Client4) UploadSamlPublicCertificate(ctx context.Context, data []byte, filename string) (*Response, error) { + body, writer, err := fileToMultipart(data, filename) + if err != nil { + return nil, fmt.Errorf("failed to prepare SAML public certificate for upload: %w", err) + } + + _, resp, err := c.DoUploadFile(ctx, c.samlRoute()+"/certificate/public", body, writer.FormDataContentType()) + return resp, err +} + +// UploadSamlPrivateCertificate will upload a private key for SAML and set the config to use it. +// The filename parameter is deprecated and ignored: the server will pick a hard-coded filename when writing to disk. +func (c *Client4) UploadSamlPrivateCertificate(ctx context.Context, data []byte, filename string) (*Response, error) { + body, writer, err := fileToMultipart(data, filename) + if err != nil { + return nil, fmt.Errorf("failed to prepare SAML private certificate for upload: %w", err) + } + + _, resp, err := c.DoUploadFile(ctx, c.samlRoute()+"/certificate/private", body, writer.FormDataContentType()) + return resp, err +} + +// DeleteSamlIdpCertificate deletes the SAML IDP certificate from the server and updates the config to not use it and disable SAML. 
+func (c *Client4) DeleteSamlIdpCertificate(ctx context.Context) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.samlRoute()+"/certificate/idp") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// DeleteSamlPublicCertificate deletes the SAML IDP certificate from the server and updates the config to not use it and disable SAML. +func (c *Client4) DeleteSamlPublicCertificate(ctx context.Context) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.samlRoute()+"/certificate/public") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// DeleteSamlPrivateCertificate deletes the SAML IDP certificate from the server and updates the config to not use it and disable SAML. +func (c *Client4) DeleteSamlPrivateCertificate(ctx context.Context) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.samlRoute()+"/certificate/private") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetSamlCertificateStatus returns metadata for the SAML configuration. +func (c *Client4) GetSamlCertificateStatus(ctx context.Context) (*SamlCertificateStatus, *Response, error) { + r, err := c.DoAPIGet(ctx, c.samlRoute()+"/certificate/status", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*SamlCertificateStatus](r) +} + +func (c *Client4) GetSamlMetadataFromIdp(ctx context.Context, samlMetadataURL string) (*SamlMetadataResponse, *Response, error) { + requestBody := make(map[string]string) + requestBody["saml_metadata_url"] = samlMetadataURL + r, err := c.DoAPIPostJSON(ctx, c.samlRoute()+"/metadatafromidp", requestBody) + if err != nil { + return nil, BuildResponse(r), err + } + + defer closeBody(r) + return DecodeJSONFromResponse[*SamlMetadataResponse](r) +} + +// ResetSamlAuthDataToEmail resets the AuthData field of SAML users to their Email. 
func (c *Client4) ResetSamlAuthDataToEmail(ctx context.Context, includeDeleted bool, dryRun bool, userIDs []string) (int64, *Response, error) {
	params := map[string]any{
		"include_deleted": includeDeleted,
		"dry_run":         dryRun,
		"user_ids":        userIDs,
	}
	r, err := c.DoAPIPostJSON(ctx, c.samlRoute()+"/reset_auth_data", params)
	if err != nil {
		return 0, BuildResponse(r), err
	}
	defer closeBody(r)
	respBody, resp, err := DecodeJSONFromResponse[map[string]int64](r)
	if err != nil {
		return 0, resp, err
	}
	// The server reports the number of users whose AuthData was reset.
	return respBody["num_affected"], resp, nil
}

// Compliance Section

// CreateComplianceReport creates a new compliance report.
func (c *Client4) CreateComplianceReport(ctx context.Context, report *Compliance) (*Compliance, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.complianceReportsRoute(), report)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Compliance](r)
}

// GetComplianceReports returns a paginated list of compliance reports.
func (c *Client4) GetComplianceReports(ctx context.Context, page, perPage int) (Compliances, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.complianceReportsRoute()+"?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[Compliances](r)
}

// GetComplianceReport returns the compliance report with the given ID.
func (c *Client4) GetComplianceReport(ctx context.Context, reportId string) (*Compliance, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.complianceReportRoute(reportId), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*Compliance](r)
}

// DownloadComplianceReport returns a full compliance report as a file.
func (c *Client4) DownloadComplianceReport(ctx context.Context, reportId string) ([]byte, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.complianceReportDownloadRoute(reportId), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	// The report is returned as raw bytes rather than decoded JSON.
	return ReadBytesFromResponse(r)
}

// Cluster Section

// GetClusterStatus returns the status of all the configured cluster nodes.
func (c *Client4) GetClusterStatus(ctx context.Context) ([]*ClusterInfo, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.clusterRoute()+"/status", "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*ClusterInfo](r)
}

// LDAP Section

// SyncLdap starts a run of the LDAP sync job.
func (c *Client4) SyncLdap(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.ldapRoute()+"/sync", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// TestLdap will attempt to connect to the configured LDAP server and return OK if configured
// correctly.
func (c *Client4) TestLdap(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.ldapRoute()+"/test", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetLdapGroups retrieves the list of LDAP groups from the server.
+func (c *Client4) GetLdapGroups(ctx context.Context) ([]*Group, *Response, error) { + path := fmt.Sprintf("%s/groups", c.ldapRoute()) + + r, err := c.DoAPIGet(ctx, path, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + responseData, resp, err := DecodeJSONFromResponse[struct { + Count int `json:"count"` + Groups []*Group `json:"groups"` + }](r) + if err != nil { + return nil, BuildResponse(r), fmt.Errorf("failed to decode LDAP groups response: %w", err) + } + for i := range responseData.Groups { + responseData.Groups[i].DisplayName = *responseData.Groups[i].Name + } + + return responseData.Groups, resp, nil +} + +// LinkLdapGroup creates or undeletes a Mattermost group and associates it to the given LDAP group DN. +func (c *Client4) LinkLdapGroup(ctx context.Context, dn string) (*Group, *Response, error) { + path := fmt.Sprintf("%s/groups/%s/link", c.ldapRoute(), dn) + + r, err := c.DoAPIPost(ctx, path, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Group](r) +} + +// UnlinkLdapGroup deletes the Mattermost group associated with the given LDAP group DN. 
+func (c *Client4) UnlinkLdapGroup(ctx context.Context, dn string) (*Group, *Response, error) { + path := fmt.Sprintf("%s/groups/%s/link", c.ldapRoute(), dn) + + r, err := c.DoAPIDelete(ctx, path) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Group](r) +} + +// MigrateIdLdap migrates the LDAP enabled users to given attribute +func (c *Client4) MigrateIdLdap(ctx context.Context, toAttribute string) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.ldapRoute()+"/migrateid", map[string]string{ + "toAttribute": toAttribute, + }) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) GetGroupsByNames(ctx context.Context, names []string) ([]*Group, *Response, error) { + path := fmt.Sprintf("%s/names", c.groupsRoute()) + + r, err := c.DoAPIPostJSON(ctx, path, names) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Group](r) +} + +// GetGroupsByChannel retrieves the Mattermost Groups associated with a given channel +func (c *Client4) GetGroupsByChannel(ctx context.Context, channelId string, opts GroupSearchOpts) ([]*GroupWithSchemeAdmin, int, *Response, error) { + values := url.Values{} + values.Set("q", opts.Q) + values.Set("include_member_count", c.boolString(opts.IncludeMemberCount)) + values.Set("filter_allow_reference", c.boolString(opts.FilterAllowReference)) + if opts.PageOpts != nil { + values.Set("page", strconv.Itoa(opts.PageOpts.Page)) + values.Set("per_page", strconv.Itoa(opts.PageOpts.PerPage)) + } + path := c.channelRoute(channelId) + "/groups?" 
+ values.Encode() + r, err := c.DoAPIGet(ctx, path, "") + if err != nil { + return nil, 0, BuildResponse(r), err + } + defer closeBody(r) + + responseData, resp, err := DecodeJSONFromResponse[struct { + Groups []*GroupWithSchemeAdmin `json:"groups"` + Count int `json:"total_group_count"` + }](r) + if err != nil { + return nil, 0, BuildResponse(r), fmt.Errorf("failed to decode groups by channel response: %w", err) + } + + return responseData.Groups, responseData.Count, resp, nil +} + +// GetGroupsByTeam retrieves the Mattermost Groups associated with a given team +func (c *Client4) GetGroupsByTeam(ctx context.Context, teamId string, opts GroupSearchOpts) ([]*GroupWithSchemeAdmin, int, *Response, error) { + values := url.Values{} + values.Set("q", opts.Q) + values.Set("include_member_count", c.boolString(opts.IncludeMemberCount)) + values.Set("filter_allow_reference", c.boolString(opts.FilterAllowReference)) + if opts.PageOpts != nil { + values.Set("page", strconv.Itoa(opts.PageOpts.Page)) + values.Set("per_page", strconv.Itoa(opts.PageOpts.PerPage)) + } + path := c.teamRoute(teamId) + "/groups?" 
+ values.Encode() + + r, err := c.DoAPIGet(ctx, path, "") + if err != nil { + return nil, 0, BuildResponse(r), err + } + defer closeBody(r) + + responseData, resp, err := DecodeJSONFromResponse[struct { + Groups []*GroupWithSchemeAdmin `json:"groups"` + Count int `json:"total_group_count"` + }](r) + if err != nil { + return nil, 0, BuildResponse(r), fmt.Errorf("failed to decode groups by team response: %w", err) + } + + return responseData.Groups, responseData.Count, resp, nil +} + +// GetGroupsAssociatedToChannelsByTeam retrieves the Mattermost Groups associated with channels in a given team +func (c *Client4) GetGroupsAssociatedToChannelsByTeam(ctx context.Context, teamId string, opts GroupSearchOpts) (map[string][]*GroupWithSchemeAdmin, *Response, error) { + values := url.Values{} + values.Set("q", opts.Q) + values.Set("filter_allow_reference", c.boolString(opts.FilterAllowReference)) + if opts.PageOpts != nil { + values.Set("page", strconv.Itoa(opts.PageOpts.Page)) + values.Set("per_page", strconv.Itoa(opts.PageOpts.PerPage)) + } + path := c.teamRoute(teamId) + "/groups_by_channels?" 
+ values.Encode() + r, err := c.DoAPIGet(ctx, path, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + responseData, resp, err := DecodeJSONFromResponse[struct { + GroupsAssociatedToChannels map[string][]*GroupWithSchemeAdmin `json:"groups"` + }](r) + if err != nil { + return nil, BuildResponse(r), fmt.Errorf("failed to decode groups associated to channels by team response: %w", err) + } + + return responseData.GroupsAssociatedToChannels, resp, nil +} + +// GetGroups retrieves Mattermost Groups +func (c *Client4) GetGroups(ctx context.Context, opts GroupSearchOpts) ([]*Group, *Response, error) { + path := fmt.Sprintf( + "%s?include_member_count=%v¬_associated_to_team=%v¬_associated_to_channel=%v&filter_allow_reference=%v&q=%v&filter_parent_team_permitted=%v&group_source=%v&include_channel_member_count=%v&include_timezones=%v&include_archived=%v&filter_archived=%v&only_syncable_sources=%v", + c.groupsRoute(), + opts.IncludeMemberCount, + opts.NotAssociatedToTeam, + opts.NotAssociatedToChannel, + opts.FilterAllowReference, + opts.Q, + opts.FilterParentTeamPermitted, + opts.Source, + opts.IncludeChannelMemberCount, + opts.IncludeTimezones, + opts.IncludeArchived, + opts.FilterArchived, + opts.OnlySyncableSources, + ) + if opts.Since > 0 { + path = fmt.Sprintf("%s&since=%v", path, opts.Since) + } + if opts.PageOpts != nil { + path = fmt.Sprintf("%s&page=%v&per_page=%v", path, opts.PageOpts.Page, opts.PageOpts.PerPage) + } + r, err := c.DoAPIGet(ctx, path, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Group](r) +} + +// GetGroupsByUserId retrieves Mattermost Groups for a user +func (c *Client4) GetGroupsByUserId(ctx context.Context, userId string) ([]*Group, *Response, error) { + path := fmt.Sprintf( + "%s/%v/groups", + c.usersRoute(), + userId, + ) + + r, err := c.DoAPIGet(ctx, path, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer 
closeBody(r) + return DecodeJSONFromResponse[[]*Group](r) +} + +func (c *Client4) MigrateAuthToLdap(ctx context.Context, fromAuthService string, matchField string, force bool) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/migrate_auth/ldap", map[string]any{ + "from": fromAuthService, + "force": force, + "match_field": matchField, + }) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) MigrateAuthToSaml(ctx context.Context, fromAuthService string, usersMap map[string]string, auto bool) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.usersRoute()+"/migrate_auth/saml", map[string]any{ + "from": fromAuthService, + "auto": auto, + "matches": usersMap, + }) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// UploadLdapPublicCertificate will upload a public certificate for LDAP and set the config to use it. +func (c *Client4) UploadLdapPublicCertificate(ctx context.Context, data []byte) (*Response, error) { + body, writer, err := fileToMultipart(data, LdapPublicCertificateName) + if err != nil { + return nil, fmt.Errorf("failed to prepare LDAP public certificate for upload: %w", err) + } + + _, resp, err := c.DoUploadFile(ctx, c.ldapRoute()+"/certificate/public", body, writer.FormDataContentType()) + return resp, err +} + +// UploadLdapPrivateCertificate will upload a private key for LDAP and set the config to use it. 
func (c *Client4) UploadLdapPrivateCertificate(ctx context.Context, data []byte) (*Response, error) {
	body, writer, err := fileToMultipart(data, LdapPrivateKeyName)
	if err != nil {
		return nil, fmt.Errorf("failed to prepare LDAP private certificate for upload: %w", err)
	}

	_, resp, err := c.DoUploadFile(ctx, c.ldapRoute()+"/certificate/private", body, writer.FormDataContentType())
	return resp, err
}

// DeleteLdapPublicCertificate deletes the LDAP public certificate from the server
// and updates the config to not use it.
func (c *Client4) DeleteLdapPublicCertificate(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.ldapRoute()+"/certificate/public")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// DeleteLdapPrivateCertificate deletes the LDAP private key from the server
// and updates the config to not use it.
func (c *Client4) DeleteLdapPrivateCertificate(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.ldapRoute()+"/certificate/private")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// Audits Section

// GetAudits returns a page of audits for the whole system; etag enables
// conditional requests.
func (c *Client4) GetAudits(ctx context.Context, page int, perPage int, etag string) (Audits, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, "/audits?"+values.Encode(), etag)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[Audits](r)
}

// Brand Section

// GetBrandImage retrieves the previously uploaded brand image.
+func (c *Client4) GetBrandImage(ctx context.Context) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.brandRoute()+"/image", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + if r.StatusCode >= 300 { + return nil, BuildResponse(r), AppErrorFromJSON(r.Body) + } + + return ReadBytesFromResponse(r) +} + +// DeleteBrandImage deletes the brand image for the system. +func (c *Client4) DeleteBrandImage(ctx context.Context) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.brandRoute()+"/image") + if err != nil { + return BuildResponse(r), err + } + return BuildResponse(r), nil +} + +// UploadBrandImage sets the brand image for the system. +func (c *Client4) UploadBrandImage(ctx context.Context, data []byte) (*Response, error) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("image", "brand.png") + if err != nil { + return nil, fmt.Errorf("failed to create form file for brand image upload: %w", err) + } + + if _, err = io.Copy(part, bytes.NewBuffer(data)); err != nil { + return nil, fmt.Errorf("failed to copy brand image data to form file: %w", err) + } + + if err = writer.Close(); err != nil { + return nil, fmt.Errorf("failed to close multipart writer for brand image upload: %w", err) + } + + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+c.brandRoute()+"/image", writer.FormDataContentType(), body, nil) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// Logs Section + +// GetLogs page of logs as a string array. 
+func (c *Client4) GetLogs(ctx context.Context, page, perPage int) ([]string, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("logs_per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, "/logs?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +// Download logs as mattermost.log file +func (c *Client4) DownloadLogs(ctx context.Context) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, "/logs/download", "") + if err != nil { + return nil, BuildResponse(r), err + } + return ReadBytesFromResponse(r) +} + +// PostLog is a convenience Web Service call so clients can log messages into +// the server-side logs. For example we typically log javascript error messages +// into the server-side. It returns the log message if the logging was successful. +func (c *Client4) PostLog(ctx context.Context, message map[string]string) (map[string]string, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, "/logs", message) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]string](r) +} + +// OAuth Section + +// CreateOAuthApp will register a new OAuth 2.0 client application with Mattermost acting as an OAuth 2.0 service provider. +func (c *Client4) CreateOAuthApp(ctx context.Context, app *OAuthApp) (*OAuthApp, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.oAuthAppsRoute(), app) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OAuthApp](r) +} + +// UpdateOAuthApp updates a page of registered OAuth 2.0 client applications with Mattermost acting as an OAuth 2.0 service provider. 
+func (c *Client4) UpdateOAuthApp(ctx context.Context, app *OAuthApp) (*OAuthApp, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.oAuthAppRoute(app.Id), app) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OAuthApp](r) +} + +// GetOAuthApps gets a page of registered OAuth 2.0 client applications with Mattermost acting as an OAuth 2.0 service provider. +func (c *Client4) GetOAuthApps(ctx context.Context, page, perPage int) ([]*OAuthApp, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.oAuthAppsRoute()+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*OAuthApp](r) +} + +// GetOAuthApp gets a registered OAuth 2.0 client application with Mattermost acting as an OAuth 2.0 service provider. +func (c *Client4) GetOAuthApp(ctx context.Context, appId string) (*OAuthApp, *Response, error) { + r, err := c.DoAPIGet(ctx, c.oAuthAppRoute(appId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OAuthApp](r) +} + +// GetOAuthAppInfo gets a sanitized version of a registered OAuth 2.0 client application with Mattermost acting as an OAuth 2.0 service provider. +func (c *Client4) GetOAuthAppInfo(ctx context.Context, appId string) (*OAuthApp, *Response, error) { + r, err := c.DoAPIGet(ctx, c.oAuthAppRoute(appId)+"/info", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OAuthApp](r) +} + +// DeleteOAuthApp deletes a registered OAuth 2.0 client application. 
+func (c *Client4) DeleteOAuthApp(ctx context.Context, appId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.oAuthAppRoute(appId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// RegenerateOAuthAppSecret regenerates the client secret for a registered OAuth 2.0 client application. +func (c *Client4) RegenerateOAuthAppSecret(ctx context.Context, appId string) (*OAuthApp, *Response, error) { + r, err := c.DoAPIPost(ctx, c.oAuthAppRoute(appId)+"/regen_secret", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*OAuthApp](r) +} + +// RegisterOAuthClient registers a new OAuth 2.0 client using Dynamic Client Registration (DCR). +func (c *Client4) RegisterOAuthClient(ctx context.Context, request *ClientRegistrationRequest) (*ClientRegistrationResponse, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.oAuthRegisterRoute(), request) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + var response ClientRegistrationResponse + if err := json.NewDecoder(r.Body).Decode(&response); err != nil { + return nil, nil, NewAppError("RegisterOAuthClient", "api.unmarshal_error", nil, "", http.StatusInternalServerError).Wrap(err) + } + return &response, BuildResponse(r), nil +} + +// GetAuthorizedOAuthAppsForUser gets a page of OAuth 2.0 client applications the user has authorized to use access their account. 
func (c *Client4) GetAuthorizedOAuthAppsForUser(ctx context.Context, userId string, page, perPage int) ([]*OAuthApp, *Response, error) {
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/oauth/apps/authorized?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*OAuthApp](r)
}

// AuthorizeOAuthApp will authorize an OAuth 2.0 client application to access a user's account and provide a redirect link to follow.
func (c *Client4) AuthorizeOAuthApp(ctx context.Context, authRequest *AuthorizeRequest) (string, *Response, error) {
	buf, err := json.Marshal(authRequest)
	if err != nil {
		return "", nil, err
	}
	// The request doesn't go to the /api/v4 subpath, so we can't use the usual helper methods
	r, err := c.doAPIRequestBytes(ctx, http.MethodPost, c.URL+"/oauth/authorize", buf, "")
	if err != nil {
		return "", BuildResponse(r), err
	}
	defer closeBody(r)

	result, resp, err := DecodeJSONFromResponse[map[string]string](r)
	if err != nil {
		return "", resp, err
	}
	// The server returns the redirect URL under the "redirect" key.
	return result["redirect"], resp, nil
}

// DeauthorizeOAuthApp will deauthorize an OAuth 2.0 client application from accessing a user's account.
func (c *Client4) DeauthorizeOAuthApp(ctx context.Context, appId string) (*Response, error) {
	requestData := map[string]string{"client_id": appId}
	buf, err := json.Marshal(requestData)
	if err != nil {
		return nil, err
	}
	// The request doesn't go to the /api/v4 subpath, so we can't use the usual helper methods
	r, err := c.doAPIRequestBytes(ctx, http.MethodPost, c.URL+"/oauth/deauthorize", buf, "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// GetOAuthAccessToken is a test helper function for the OAuth access token endpoint.
func (c *Client4) GetOAuthAccessToken(ctx context.Context, data url.Values) (*AccessResponse, *Response, error) {
	// The token endpoint expects a form-encoded body rather than JSON.
	r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.URL+"/oauth/access_token", "application/x-www-form-urlencoded", strings.NewReader(data.Encode()), nil)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)

	return DecodeJSONFromResponse[*AccessResponse](r)
}

// OutgoingOAuthConnection section

// GetOutgoingOAuthConnections retrieves the outgoing OAuth connections matching the given filters.
func (c *Client4) GetOutgoingOAuthConnections(ctx context.Context, filters OutgoingOAuthConnectionGetConnectionsFilter) ([]*OutgoingOAuthConnection, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.outgoingOAuthConnectionsRoute()+"?"+filters.ToURLValues().Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[[]*OutgoingOAuthConnection](r)
}

// GetOutgoingOAuthConnection retrieves the outgoing OAuth connection with the given ID.
func (c *Client4) GetOutgoingOAuthConnection(ctx context.Context, id string) (*OutgoingOAuthConnection, *Response, error) {
	r, err := c.DoAPIGet(ctx, c.outgoingOAuthConnectionRoute(id), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*OutgoingOAuthConnection](r)
}

// DeleteOutgoingOAuthConnection deletes the outgoing OAuth connection with the given ID.
func (c *Client4) DeleteOutgoingOAuthConnection(ctx context.Context, id string) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.outgoingOAuthConnectionRoute(id))
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// UpdateOutgoingOAuthConnection updates the outgoing OAuth connection with the given ID.
func (c *Client4) UpdateOutgoingOAuthConnection(ctx context.Context, connection *OutgoingOAuthConnection) (*OutgoingOAuthConnection, *Response, error) {
	r, err := c.DoAPIPutJSON(ctx, c.outgoingOAuthConnectionRoute(connection.Id), connection)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*OutgoingOAuthConnection](r)
}

// CreateOutgoingOAuthConnection creates a new outgoing OAuth connection.
func (c *Client4) CreateOutgoingOAuthConnection(ctx context.Context, connection *OutgoingOAuthConnection) (*OutgoingOAuthConnection, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.outgoingOAuthConnectionsRoute(), connection)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*OutgoingOAuthConnection](r)
}

// Elasticsearch Section

// TestElasticsearch will attempt to connect to the configured Elasticsearch server and return OK if configured
// correctly.
func (c *Client4) TestElasticsearch(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.elasticsearchRoute()+"/test", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// PurgeElasticsearchIndexes immediately deletes all Elasticsearch indexes.
func (c *Client4) PurgeElasticsearchIndexes(ctx context.Context) (*Response, error) {
	r, err := c.DoAPIPost(ctx, c.elasticsearchRoute()+"/purge_indexes", "")
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// Data Retention Section

// GetDataRetentionPolicy will get the current global data retention policy details.
+func (c *Client4) GetDataRetentionPolicy(ctx context.Context) (*GlobalRetentionPolicy, *Response, error) { + r, err := c.DoAPIGet(ctx, c.dataRetentionRoute()+"/policy", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*GlobalRetentionPolicy](r) +} + +// GetDataRetentionPolicyByID will get the details for the granular data retention policy with the specified ID. +func (c *Client4) GetDataRetentionPolicyByID(ctx context.Context, policyID string) (*RetentionPolicyWithTeamAndChannelCounts, *Response, error) { + r, err := c.DoAPIGet(ctx, c.dataRetentionPolicyRoute(policyID), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*RetentionPolicyWithTeamAndChannelCounts](r) +} + +// GetDataRetentionPoliciesCount will get the total number of granular data retention policies. +func (c *Client4) GetDataRetentionPoliciesCount(ctx context.Context) (int64, *Response, error) { + type CountBody struct { + TotalCount int64 `json:"total_count"` + } + r, err := c.DoAPIGet(ctx, c.dataRetentionRoute()+"/policies_count", "") + if err != nil { + return 0, BuildResponse(r), err + } + countObj, resp, err := DecodeJSONFromResponse[CountBody](r) + if err != nil { + return 0, resp, err + } + return countObj.TotalCount, resp, nil +} + +// GetDataRetentionPolicies will get the current granular data retention policies' details. 
func (c *Client4) GetDataRetentionPolicies(ctx context.Context, page, perPage int) (*RetentionPolicyWithTeamAndChannelCountsList, *Response, error) {
	// Results are paginated via the page and per_page query parameters.
	values := url.Values{}
	values.Set("page", strconv.Itoa(page))
	values.Set("per_page", strconv.Itoa(perPage))
	r, err := c.DoAPIGet(ctx, c.dataRetentionRoute()+"/policies?"+values.Encode(), "")
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*RetentionPolicyWithTeamAndChannelCountsList](r)
}

// CreateDataRetentionPolicy will create a new granular data retention policy which will be applied to
// the specified teams and channels. The Id field of `policy` must be empty.
func (c *Client4) CreateDataRetentionPolicy(ctx context.Context, policy *RetentionPolicyWithTeamAndChannelIDs) (*RetentionPolicyWithTeamAndChannelCounts, *Response, error) {
	r, err := c.DoAPIPostJSON(ctx, c.dataRetentionRoute()+"/policies", policy)
	if err != nil {
		return nil, BuildResponse(r), err
	}
	defer closeBody(r)
	return DecodeJSONFromResponse[*RetentionPolicyWithTeamAndChannelCounts](r)
}

// DeleteDataRetentionPolicy will delete the granular data retention policy with the specified ID.
func (c *Client4) DeleteDataRetentionPolicy(ctx context.Context, policyID string) (*Response, error) {
	r, err := c.DoAPIDelete(ctx, c.dataRetentionPolicyRoute(policyID))
	if err != nil {
		return BuildResponse(r), err
	}
	defer closeBody(r)
	return BuildResponse(r), nil
}

// PatchDataRetentionPolicy will patch the granular data retention policy with the specified ID.
// The Id field of `patch` must be non-empty.
+func (c *Client4) PatchDataRetentionPolicy(ctx context.Context, patch *RetentionPolicyWithTeamAndChannelIDs) (*RetentionPolicyWithTeamAndChannelCounts, *Response, error) { + r, err := c.DoAPIPatchJSON(ctx, c.dataRetentionPolicyRoute(patch.ID), patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*RetentionPolicyWithTeamAndChannelCounts](r) +} + +// GetTeamsForRetentionPolicy will get the teams to which the specified policy is currently applied. +func (c *Client4) GetTeamsForRetentionPolicy(ctx context.Context, policyID string, page, perPage int) (*TeamsWithCount, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.dataRetentionPolicyRoute(policyID)+"/teams?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + return DecodeJSONFromResponse[*TeamsWithCount](r) +} + +// SearchTeamsForRetentionPolicy will search the teams to which the specified policy is currently applied. +func (c *Client4) SearchTeamsForRetentionPolicy(ctx context.Context, policyID string, term string) ([]*Team, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.dataRetentionPolicyRoute(policyID)+"/teams/search", map[string]any{"term": term}) + if err != nil { + return nil, BuildResponse(r), err + } + return DecodeJSONFromResponse[[]*Team](r) +} + +// AddTeamsToRetentionPolicy will add the specified teams to the granular data retention policy +// with the specified ID. 
+func (c *Client4) AddTeamsToRetentionPolicy(ctx context.Context, policyID string, teamIDs []string) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.dataRetentionPolicyRoute(policyID)+"/teams", teamIDs) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// RemoveTeamsFromRetentionPolicy will remove the specified teams from the granular data retention policy +// with the specified ID. +func (c *Client4) RemoveTeamsFromRetentionPolicy(ctx context.Context, policyID string, teamIDs []string) (*Response, error) { + r, err := c.DoAPIDeleteJSON(ctx, c.dataRetentionPolicyRoute(policyID)+"/teams", teamIDs) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetChannelsForRetentionPolicy will get the channels to which the specified policy is currently applied. +func (c *Client4) GetChannelsForRetentionPolicy(ctx context.Context, policyID string, page, perPage int) (*ChannelsWithCount, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.dataRetentionPolicyRoute(policyID)+"/channels?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + return DecodeJSONFromResponse[*ChannelsWithCount](r) +} + +// SearchChannelsForRetentionPolicy will search the channels to which the specified policy is currently applied. 
+func (c *Client4) SearchChannelsForRetentionPolicy(ctx context.Context, policyID string, term string) (ChannelListWithTeamData, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.dataRetentionPolicyRoute(policyID)+"/channels/search", map[string]any{"term": term}) + if err != nil { + return nil, BuildResponse(r), err + } + return DecodeJSONFromResponse[ChannelListWithTeamData](r) +} + +// AddChannelsToRetentionPolicy will add the specified channels to the granular data retention policy +// with the specified ID. +func (c *Client4) AddChannelsToRetentionPolicy(ctx context.Context, policyID string, channelIDs []string) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.dataRetentionPolicyRoute(policyID)+"/channels", channelIDs) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// RemoveChannelsFromRetentionPolicy will remove the specified channels from the granular data retention policy +// with the specified ID. +func (c *Client4) RemoveChannelsFromRetentionPolicy(ctx context.Context, policyID string, channelIDs []string) (*Response, error) { + r, err := c.DoAPIDeleteJSON(ctx, c.dataRetentionPolicyRoute(policyID)+"/channels", channelIDs) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetTeamPoliciesForUser will get the data retention policies for the teams to which a user belongs. +func (c *Client4) GetTeamPoliciesForUser(ctx context.Context, userID string, offset, limit int) (*RetentionPolicyForTeamList, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userID)+"/data_retention/team_policies", "") + if err != nil { + return nil, BuildResponse(r), err + } + return DecodeJSONFromResponse[*RetentionPolicyForTeamList](r) +} + +// GetChannelPoliciesForUser will get the data retention policies for the channels to which a user belongs. 
+func (c *Client4) GetChannelPoliciesForUser(ctx context.Context, userID string, offset, limit int) (*RetentionPolicyForChannelList, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userID)+"/data_retention/channel_policies", "") + if err != nil { + return nil, BuildResponse(r), err + } + return DecodeJSONFromResponse[*RetentionPolicyForChannelList](r) +} + +// Drafts Sections + +// UpsertDraft will create a new draft or update a draft if it already exists +func (c *Client4) UpsertDraft(ctx context.Context, draft *Draft) (*Draft, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.draftsRoute(), draft) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Draft](r) +} + +// GetDrafts will get all drafts for a user +func (c *Client4) GetDrafts(ctx context.Context, userId, teamId string) ([]*Draft, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userId)+c.teamRoute(teamId)+"/drafts", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Draft](r) +} + +func (c *Client4) DeleteDraft(ctx context.Context, userId, channelId, rootId string) (*Draft, *Response, error) { + r, err := c.DoAPIDelete(ctx, c.userRoute(userId)+c.channelRoute(channelId)+"/drafts") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Draft](r) +} + +// Commands Section + +// CreateCommand will create a new command if the user have the right permissions. +func (c *Client4) CreateCommand(ctx context.Context, cmd *Command) (*Command, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.commandsRoute(), cmd) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Command](r) +} + +// UpdateCommand updates a command based on the provided Command struct. 
+func (c *Client4) UpdateCommand(ctx context.Context, cmd *Command) (*Command, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.commandRoute(cmd.Id), cmd) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Command](r) +} + +// MoveCommand moves a command to a different team. +func (c *Client4) MoveCommand(ctx context.Context, teamId string, commandId string) (*Response, error) { + cmr := CommandMoveRequest{TeamId: teamId} + r, err := c.DoAPIPutJSON(ctx, c.commandMoveRoute(commandId), cmr) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// DeleteCommand deletes a command based on the provided command id string. +func (c *Client4) DeleteCommand(ctx context.Context, commandId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.commandRoute(commandId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// ListCommands will retrieve a list of commands available in the team. +func (c *Client4) ListCommands(ctx context.Context, teamId string, customOnly bool) ([]*Command, *Response, error) { + values := url.Values{} + values.Set("team_id", teamId) + values.Set("custom_only", c.boolString(customOnly)) + r, err := c.DoAPIGet(ctx, c.commandsRoute()+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Command](r) +} + +// ListCommandAutocompleteSuggestions will retrieve a list of suggestions for a userInput. 
+func (c *Client4) ListCommandAutocompleteSuggestions(ctx context.Context, userInput, teamId string) ([]AutocompleteSuggestion, *Response, error) { + values := url.Values{} + values.Set("user_input", userInput) + r, err := c.DoAPIGet(ctx, c.teamRoute(teamId)+"/commands/autocomplete_suggestions?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]AutocompleteSuggestion](r) +} + +// GetCommandById will retrieve a command by id. +func (c *Client4) GetCommandById(ctx context.Context, cmdId string) (*Command, *Response, error) { + url := fmt.Sprintf("%s/%s", c.commandsRoute(), cmdId) + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Command](r) +} + +// ExecuteCommand executes a given slash command. +func (c *Client4) ExecuteCommand(ctx context.Context, channelId, command string) (*CommandResponse, *Response, error) { + commandArgs := &CommandArgs{ + ChannelId: channelId, + Command: command, + } + r, err := c.DoAPIPostJSON(ctx, c.commandsRoute()+"/execute", commandArgs) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + response, err := CommandResponseFromJSON(r.Body) + if err != nil { + return nil, BuildResponse(r), fmt.Errorf("failed to decode command response: %w", err) + } + return response, BuildResponse(r), nil +} + +// ExecuteCommandWithTeam executes a given slash command against the specified team. +// Use this when executing slash commands in a DM/GM, since the team id cannot be inferred in that case. 
+func (c *Client4) ExecuteCommandWithTeam(ctx context.Context, channelId, teamId, command string) (*CommandResponse, *Response, error) { + commandArgs := &CommandArgs{ + ChannelId: channelId, + TeamId: teamId, + Command: command, + } + r, err := c.DoAPIPostJSON(ctx, c.commandsRoute()+"/execute", commandArgs) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + response, err := CommandResponseFromJSON(r.Body) + if err != nil { + return nil, BuildResponse(r), fmt.Errorf("failed to decode command response: %w", err) + } + return response, BuildResponse(r), nil +} + +// ListAutocompleteCommands will retrieve a list of commands available in the team. +func (c *Client4) ListAutocompleteCommands(ctx context.Context, teamId string) ([]*Command, *Response, error) { + r, err := c.DoAPIGet(ctx, c.teamAutoCompleteCommandsRoute(teamId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Command](r) +} + +// RegenCommandToken will create a new token if the user have the right permissions. +func (c *Client4) RegenCommandToken(ctx context.Context, commandId string) (string, *Response, error) { + r, err := c.DoAPIPut(ctx, c.commandRoute(commandId)+"/regen_token", "") + if err != nil { + return "", BuildResponse(r), err + } + defer closeBody(r) + result, resp, err := DecodeJSONFromResponse[map[string]string](r) + if err != nil { + return "", resp, err + } + return result["token"], resp, nil +} + +// Status Section + +// GetUserStatus returns a user based on the provided user id string. +func (c *Client4) GetUserStatus(ctx context.Context, userId, etag string) (*Status, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userStatusRoute(userId), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Status](r) +} + +// GetUsersStatusesByIds returns a list of users status based on the provided user ids. 
+func (c *Client4) GetUsersStatusesByIds(ctx context.Context, userIds []string) ([]*Status, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.userStatusesRoute()+"/ids", userIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Status](r) +} + +// UpdateUserStatus sets a user's status based on the provided user id string. +func (c *Client4) UpdateUserStatus(ctx context.Context, userId string, userStatus *Status) (*Status, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.userStatusRoute(userId), userStatus) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Status](r) +} + +// UpdateUserCustomStatus sets a user's custom status based on the provided user id string. +// The returned CustomStatus object is the same as the one passed, and it should be just +// ignored. It's only kept to maintain compatibility. +func (c *Client4) UpdateUserCustomStatus(ctx context.Context, userId string, userCustomStatus *CustomStatus) (*CustomStatus, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.userStatusRoute(userId)+"/custom", userCustomStatus) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + // This is returning the same status which was passed. + // The API was incorrectly designed to return a status returned from the server, + // but the server doesn't return anything except an OK. + return userCustomStatus, BuildResponse(r), nil +} + +// RemoveUserCustomStatus remove a user's custom status based on the provided user id string. +func (c *Client4) RemoveUserCustomStatus(ctx context.Context, userId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.userStatusRoute(userId)+"/custom") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// RemoveRecentUserCustomStatus remove a recent user's custom status based on the provided user id string. 
+func (c *Client4) RemoveRecentUserCustomStatus(ctx context.Context, userId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.userStatusRoute(userId)+"/custom/recent") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// Emoji Section + +// CreateEmoji will save an emoji to the server if the current user has permission +// to do so. If successful, the provided emoji will be returned with its Id field +// filled in. Otherwise, an error will be returned. +func (c *Client4) CreateEmoji(ctx context.Context, emoji *Emoji, image []byte, filename string) (*Emoji, *Response, error) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("image", filename) + if err != nil { + return nil, nil, err + } + + _, err = io.Copy(part, bytes.NewBuffer(image)) + if err != nil { + return nil, nil, err + } + + emojiJSON, err := json.Marshal(emoji) + if err != nil { + return nil, nil, fmt.Errorf("failed to marshal emoji data: %w", err) + } + + if err = writer.WriteField("emoji", string(emojiJSON)); err != nil { + return nil, nil, err + } + + if err = writer.Close(); err != nil { + return nil, nil, err + } + + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+c.emojisRoute(), writer.FormDataContentType(), body, nil) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Emoji](r) +} + +// GetEmojiList returns a page of custom emoji on the system. 
+func (c *Client4) GetEmojiList(ctx context.Context, page, perPage int) ([]*Emoji, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.emojisRoute()+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Emoji](r) +} + +// GetSortedEmojiList returns a page of custom emoji on the system sorted based on the sort +// parameter, blank for no sorting and "name" to sort by emoji names. +func (c *Client4) GetSortedEmojiList(ctx context.Context, page, perPage int, sort string) ([]*Emoji, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("sort", sort) + r, err := c.DoAPIGet(ctx, c.emojisRoute()+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Emoji](r) +} + +// GetEmojisByNames takes an array of custom emoji names and returns an array of those emojis. +func (c *Client4) GetEmojisByNames(ctx context.Context, names []string) ([]*Emoji, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.emojisRoute()+"/names", names) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Emoji](r) +} + +// DeleteEmoji delete an custom emoji on the provided emoji id string. +func (c *Client4) DeleteEmoji(ctx context.Context, emojiId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.emojiRoute(emojiId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetEmoji returns a custom emoji based on the emojiId string. 
+func (c *Client4) GetEmoji(ctx context.Context, emojiId string) (*Emoji, *Response, error) { + r, err := c.DoAPIGet(ctx, c.emojiRoute(emojiId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Emoji](r) +} + +// GetEmojiByName returns a custom emoji based on the name string. +func (c *Client4) GetEmojiByName(ctx context.Context, name string) (*Emoji, *Response, error) { + r, err := c.DoAPIGet(ctx, c.emojiByNameRoute(name), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Emoji](r) +} + +// GetEmojiImage returns the emoji image. +func (c *Client4) GetEmojiImage(ctx context.Context, emojiId string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.emojiRoute(emojiId)+"/image", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// SearchEmoji returns a list of emoji matching some search criteria. +func (c *Client4) SearchEmoji(ctx context.Context, search *EmojiSearch) ([]*Emoji, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.emojisRoute()+"/search", search) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Emoji](r) +} + +// AutocompleteEmoji returns a list of emoji starting with or matching name. +func (c *Client4) AutocompleteEmoji(ctx context.Context, name string, etag string) ([]*Emoji, *Response, error) { + values := url.Values{} + values.Set("name", name) + r, err := c.DoAPIGet(ctx, c.emojisRoute()+"/autocomplete?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Emoji](r) +} + +// Reaction Section + +// SaveReaction saves an emoji reaction for a post. Returns the saved reaction if successful, otherwise an error will be returned. 
+func (c *Client4) SaveReaction(ctx context.Context, reaction *Reaction) (*Reaction, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.reactionsRoute(), reaction) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Reaction](r) +} + +// GetReactions returns a list of reactions to a post. +func (c *Client4) GetReactions(ctx context.Context, postId string) ([]*Reaction, *Response, error) { + r, err := c.DoAPIGet(ctx, c.postRoute(postId)+"/reactions", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Reaction](r) +} + +// DeleteReaction deletes reaction of a user in a post. +func (c *Client4) DeleteReaction(ctx context.Context, reaction *Reaction) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.userRoute(reaction.UserId)+c.postRoute(reaction.PostId)+fmt.Sprintf("/reactions/%v", reaction.EmojiName)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// FetchBulkReactions returns a map of postIds and corresponding reactions +func (c *Client4) GetBulkReactions(ctx context.Context, postIds []string) (map[string][]*Reaction, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.postsRoute()+"/ids/reactions", postIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string][]*Reaction](r) +} + +// Timezone Section + +// GetSupportedTimezone returns a page of supported timezones on the system. +func (c *Client4) GetSupportedTimezone(ctx context.Context) ([]string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.timezonesRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +// Jobs Section + +// GetJob gets a single job. 
+func (c *Client4) GetJob(ctx context.Context, id string) (*Job, *Response, error) { + r, err := c.DoAPIGet(ctx, c.jobsRoute()+fmt.Sprintf("/%v", id), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Job](r) +} + +// GetJobs gets all jobs, sorted with the job that was created most recently first. +func (c *Client4) GetJobs(ctx context.Context, jobType string, status string, page int, perPage int) ([]*Job, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + values.Set("job_type", jobType) + values.Set("status", status) + r, err := c.DoAPIGet(ctx, c.jobsRoute()+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Job](r) +} + +// GetJobsByType gets all jobs of a given type, sorted with the job that was created most recently first. +func (c *Client4) GetJobsByType(ctx context.Context, jobType string, page int, perPage int) ([]*Job, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.jobsRoute()+"/type/"+jobType+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Job](r) +} + +// CreateJob creates a job based on the provided job struct. +func (c *Client4) CreateJob(ctx context.Context, job *Job) (*Job, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.jobsRoute(), job) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Job](r) +} + +// CancelJob requests the cancellation of the job with the provided Id. 
+func (c *Client4) CancelJob(ctx context.Context, jobId string) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.jobsRoute()+fmt.Sprintf("/%v/cancel", jobId), "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// DownloadJob downloads the results of the job +func (c *Client4) DownloadJob(ctx context.Context, jobId string) ([]byte, *Response, error) { + r, err := c.DoAPIGet(ctx, c.jobsRoute()+fmt.Sprintf("/%v/download", jobId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return ReadBytesFromResponse(r) +} + +// UpdateJobStatus updates the status of a job +func (c *Client4) UpdateJobStatus(ctx context.Context, jobId string, status string, force bool) (*Response, error) { + data := map[string]any{ + "status": status, + "force": force, + } + r, err := c.DoAPIPatchJSON(ctx, c.jobsRoute()+fmt.Sprintf("/%v/status", jobId), data) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// Roles Section + +// GetAllRoles returns a list of all the roles. +func (c *Client4) GetAllRoles(ctx context.Context) ([]*Role, *Response, error) { + r, err := c.DoAPIGet(ctx, c.rolesRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Role](r) +} + +// GetRole gets a single role by ID. +func (c *Client4) GetRole(ctx context.Context, id string) (*Role, *Response, error) { + r, err := c.DoAPIGet(ctx, c.rolesRoute()+fmt.Sprintf("/%v", id), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Role](r) +} + +// GetRoleByName gets a single role by Name. 
+func (c *Client4) GetRoleByName(ctx context.Context, name string) (*Role, *Response, error) { + r, err := c.DoAPIGet(ctx, c.rolesRoute()+fmt.Sprintf("/name/%v", name), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Role](r) +} + +// GetRolesByNames returns a list of roles based on the provided role names. +func (c *Client4) GetRolesByNames(ctx context.Context, roleNames []string) ([]*Role, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.rolesRoute()+"/names", roleNames) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Role](r) +} + +// PatchRole partially updates a role in the system. Any missing fields are not updated. +func (c *Client4) PatchRole(ctx context.Context, roleId string, patch *RolePatch) (*Role, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.rolesRoute()+fmt.Sprintf("/%v/patch", roleId), patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Role](r) +} + +// Schemes Section + +// CreateScheme creates a new Scheme. +func (c *Client4) CreateScheme(ctx context.Context, scheme *Scheme) (*Scheme, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.schemesRoute(), scheme) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Scheme](r) +} + +// GetScheme gets a single scheme by ID. +func (c *Client4) GetScheme(ctx context.Context, id string) (*Scheme, *Response, error) { + r, err := c.DoAPIGet(ctx, c.schemeRoute(id), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Scheme](r) +} + +// GetSchemes ets all schemes, sorted with the most recently created first, optionally filtered by scope. 
+func (c *Client4) GetSchemes(ctx context.Context, scope string, page int, perPage int) ([]*Scheme, *Response, error) { + values := url.Values{} + values.Set("scope", scope) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.schemesRoute()+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Scheme](r) +} + +// DeleteScheme deletes a single scheme by ID. +func (c *Client4) DeleteScheme(ctx context.Context, id string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.schemeRoute(id)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// PatchScheme partially updates a scheme in the system. Any missing fields are not updated. +func (c *Client4) PatchScheme(ctx context.Context, id string, patch *SchemePatch) (*Scheme, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.schemeRoute(id)+"/patch", patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Scheme](r) +} + +// GetTeamsForScheme gets the teams using this scheme, sorted alphabetically by display name. +func (c *Client4) GetTeamsForScheme(ctx context.Context, schemeId string, page int, perPage int) ([]*Team, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.schemeRoute(schemeId)+"/teams?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Team](r) +} + +// GetChannelsForScheme gets the channels using this scheme, sorted alphabetically by display name. 
+func (c *Client4) GetChannelsForScheme(ctx context.Context, schemeId string, page int, perPage int) (ChannelList, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.schemeRoute(schemeId)+"/channels?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[ChannelList](r) +} + +// Plugin Section + +// UploadPlugin takes an io.Reader stream pointing to the contents of a .tar.gz plugin. +func (c *Client4) UploadPlugin(ctx context.Context, file io.Reader) (*Manifest, *Response, error) { + return c.uploadPlugin(ctx, file, false) +} + +func (c *Client4) UploadPluginForced(ctx context.Context, file io.Reader) (*Manifest, *Response, error) { + return c.uploadPlugin(ctx, file, true) +} + +func (c *Client4) uploadPlugin(ctx context.Context, file io.Reader, force bool) (*Manifest, *Response, error) { + body := new(bytes.Buffer) + writer := multipart.NewWriter(body) + + if force { + err := writer.WriteField("force", c.boolString(true)) + if err != nil { + return nil, nil, err + } + } + + part, err := writer.CreateFormFile("plugin", "plugin.tar.gz") + if err != nil { + return nil, nil, err + } + + if _, err = io.Copy(part, file); err != nil { + return nil, nil, err + } + + if err = writer.Close(); err != nil { + return nil, nil, err + } + + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+c.pluginsRoute(), writer.FormDataContentType(), body, nil) + if err != nil { + return nil, BuildResponse(r), err + } + + return DecodeJSONFromResponse[*Manifest](r) +} + +func (c *Client4) InstallPluginFromURL(ctx context.Context, downloadURL string, force bool) (*Manifest, *Response, error) { + values := url.Values{} + values.Set("plugin_download_url", downloadURL) + values.Set("force", c.boolString(force)) + url := c.pluginsRoute() + "/install_from_url?" 
+ values.Encode() + r, err := c.DoAPIPost(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Manifest](r) +} + +// InstallMarketplacePlugin will install marketplace plugin. +func (c *Client4) InstallMarketplacePlugin(ctx context.Context, request *InstallMarketplacePluginRequest) (*Manifest, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.pluginsRoute()+"/marketplace", request) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Manifest](r) +} + +// ReattachPlugin asks the server to reattach to a plugin launched by another process. +// +// Only available in local mode, and currently only used for testing. +func (c *Client4) ReattachPlugin(ctx context.Context, request *PluginReattachRequest) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.pluginsRoute()+"/reattach", request) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +// DetachPlugin detaches a previously reattached plugin. +// +// Only available in local mode, and currently only used for testing. +func (c *Client4) DetachPlugin(ctx context.Context, pluginID string) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.pluginRoute(pluginID)+"/detach", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +// GetPlugins will return a list of plugin manifests for currently active plugins. +func (c *Client4) GetPlugins(ctx context.Context) (*PluginsResponse, *Response, error) { + r, err := c.DoAPIGet(ctx, c.pluginsRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PluginsResponse](r) +} + +// GetPluginStatuses will return the plugins installed on any server in the cluster, for reporting +// to the administrator via the system console. 
+func (c *Client4) GetPluginStatuses(ctx context.Context) (PluginStatuses, *Response, error) { + r, err := c.DoAPIGet(ctx, c.pluginsRoute()+"/statuses", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[PluginStatuses](r) +} + +// RemovePlugin will disable and delete a plugin. +func (c *Client4) RemovePlugin(ctx context.Context, id string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.pluginRoute(id)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetWebappPlugins will return a list of plugins that the webapp should download. +func (c *Client4) GetWebappPlugins(ctx context.Context) ([]*Manifest, *Response, error) { + r, err := c.DoAPIGet(ctx, c.pluginsRoute()+"/webapp", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Manifest](r) +} + +// EnablePlugin will enable an plugin installed. +func (c *Client4) EnablePlugin(ctx context.Context, id string) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.pluginRoute(id)+"/enable", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// DisablePlugin will disable an enabled plugin. +func (c *Client4) DisablePlugin(ctx context.Context, id string) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.pluginRoute(id)+"/disable", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetMarketplacePlugins will return a list of plugins that an admin can install. 
+func (c *Client4) GetMarketplacePlugins(ctx context.Context, filter *MarketplacePluginFilter) ([]*MarketplacePlugin, *Response, error) { + route := c.pluginsRoute() + "/marketplace" + u, err := url.Parse(route) + if err != nil { + return nil, nil, err + } + + filter.ApplyToURL(u) + + r, err := c.DoAPIGet(ctx, u.String(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + + plugins, err := MarketplacePluginsFromReader(r.Body) + if err != nil { + return nil, BuildResponse(r), fmt.Errorf("failed to parse marketplace plugins response: %w", err) + } + + return plugins, BuildResponse(r), nil +} + +// UpdateChannelScheme will update a channel's scheme. +func (c *Client4) UpdateChannelScheme(ctx context.Context, channelId, schemeId string) (*Response, error) { + sip := &SchemeIDPatch{SchemeID: &schemeId} + r, err := c.DoAPIPutJSON(ctx, c.channelSchemeRoute(channelId), sip) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// UpdateTeamScheme will update a team's scheme. +func (c *Client4) UpdateTeamScheme(ctx context.Context, teamId, schemeId string) (*Response, error) { + sip := &SchemeIDPatch{SchemeID: &schemeId} + r, err := c.DoAPIPutJSON(ctx, c.teamSchemeRoute(teamId), sip) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetRedirectLocation retrieves the value of the 'Location' header of an HTTP response for a given URL. +func (c *Client4) GetRedirectLocation(ctx context.Context, urlParam, etag string) (string, *Response, error) { + values := url.Values{} + values.Set("url", urlParam) + url := c.redirectLocationRoute() + "?" 
+ values.Encode() + r, err := c.DoAPIGet(ctx, url, etag) + if err != nil { + return "", BuildResponse(r), err + } + defer closeBody(r) + result, resp, err := DecodeJSONFromResponse[map[string]string](r) + if err != nil { + return "", resp, err + } + return result["location"], resp, nil +} + +// SetServerBusy will mark the server as busy, which disables non-critical services for `secs` seconds. +func (c *Client4) SetServerBusy(ctx context.Context, secs int) (*Response, error) { + values := url.Values{} + values.Set("seconds", strconv.Itoa(secs)) + url := c.serverBusyRoute() + "?" + values.Encode() + r, err := c.DoAPIPost(ctx, url, "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// ClearServerBusy will mark the server as not busy. +func (c *Client4) ClearServerBusy(ctx context.Context) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.serverBusyRoute()) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetServerBusy returns the current ServerBusyState including the time when a server marked busy +// will automatically have the flag cleared. +func (c *Client4) GetServerBusy(ctx context.Context) (*ServerBusyState, *Response, error) { + r, err := c.DoAPIGet(ctx, c.serverBusyRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ServerBusyState](r) +} + +// RegisterTermsOfServiceAction saves action performed by a user against a specific terms of service. 
+func (c *Client4) RegisterTermsOfServiceAction(ctx context.Context, userId, termsOfServiceId string, accepted bool) (*Response, error) { + url := c.userTermsOfServiceRoute(userId) + data := map[string]any{"termsOfServiceId": termsOfServiceId, "accepted": accepted} + r, err := c.DoAPIPostJSON(ctx, url, data) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetTermsOfService fetches the latest terms of service +func (c *Client4) GetTermsOfService(ctx context.Context, etag string) (*TermsOfService, *Response, error) { + url := c.termsOfServiceRoute() + r, err := c.DoAPIGet(ctx, url, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*TermsOfService](r) +} + +// GetUserTermsOfService fetches user's latest terms of service action if the latest action was for acceptance. +func (c *Client4) GetUserTermsOfService(ctx context.Context, userId, etag string) (*UserTermsOfService, *Response, error) { + url := c.userTermsOfServiceRoute(userId) + r, err := c.DoAPIGet(ctx, url, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UserTermsOfService](r) +} + +// CreateTermsOfService creates new terms of service. 
+func (c *Client4) CreateTermsOfService(ctx context.Context, text, userId string) (*TermsOfService, *Response, error) { + url := c.termsOfServiceRoute() + data := map[string]any{"text": text} + r, err := c.DoAPIPostJSON(ctx, url, data) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*TermsOfService](r) +} + +func (c *Client4) GetGroup(ctx context.Context, groupID, etag string) (*Group, *Response, error) { + r, err := c.DoAPIGet(ctx, c.groupRoute(groupID), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Group](r) +} + +func (c *Client4) CreateGroup(ctx context.Context, group *Group) (*Group, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, "/groups", group) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Group](r) +} + +func (c *Client4) DeleteGroup(ctx context.Context, groupID string) (*Group, *Response, error) { + r, err := c.DoAPIDelete(ctx, c.groupRoute(groupID)) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Group](r) +} + +func (c *Client4) RestoreGroup(ctx context.Context, groupID string, etag string) (*Group, *Response, error) { + r, err := c.DoAPIPost(ctx, c.groupRoute(groupID)+"/restore", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Group](r) +} + +func (c *Client4) PatchGroup(ctx context.Context, groupID string, patch *GroupPatch) (*Group, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.groupRoute(groupID)+"/patch", patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Group](r) +} + +func (c *Client4) GetGroupMembers(ctx context.Context, groupID string) (*GroupMemberList, *Response, error) { + r, err := c.DoAPIGet(ctx, c.groupRoute(groupID)+"/members", 
"") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*GroupMemberList](r) +} + +func (c *Client4) UpsertGroupMembers(ctx context.Context, groupID string, userIds *GroupModifyMembers) ([]*GroupMember, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.groupRoute(groupID)+"/members", userIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*GroupMember](r) +} + +func (c *Client4) DeleteGroupMembers(ctx context.Context, groupID string, userIds *GroupModifyMembers) ([]*GroupMember, *Response, error) { + r, err := c.DoAPIDeleteJSON(ctx, c.groupRoute(groupID)+"/members", userIds) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*GroupMember](r) +} + +func (c *Client4) LinkGroupSyncable(ctx context.Context, groupID, syncableID string, syncableType GroupSyncableType, patch *GroupSyncablePatch) (*GroupSyncable, *Response, error) { + url := fmt.Sprintf("%s/link", c.groupSyncableRoute(groupID, syncableID, syncableType)) + r, err := c.DoAPIPostJSON(ctx, url, patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*GroupSyncable](r) +} + +func (c *Client4) UnlinkGroupSyncable(ctx context.Context, groupID, syncableID string, syncableType GroupSyncableType) (*Response, error) { + url := fmt.Sprintf("%s/link", c.groupSyncableRoute(groupID, syncableID, syncableType)) + r, err := c.DoAPIDelete(ctx, url) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) GetGroupSyncable(ctx context.Context, groupID, syncableID string, syncableType GroupSyncableType, etag string) (*GroupSyncable, *Response, error) { + r, err := c.DoAPIGet(ctx, c.groupSyncableRoute(groupID, syncableID, syncableType), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer 
closeBody(r) + return DecodeJSONFromResponse[*GroupSyncable](r) +} + +func (c *Client4) GetGroupSyncables(ctx context.Context, groupID string, syncableType GroupSyncableType, etag string) ([]*GroupSyncable, *Response, error) { + r, err := c.DoAPIGet(ctx, c.groupSyncablesRoute(groupID, syncableType), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*GroupSyncable](r) +} + +func (c *Client4) PatchGroupSyncable(ctx context.Context, groupID, syncableID string, syncableType GroupSyncableType, patch *GroupSyncablePatch) (*GroupSyncable, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.groupSyncableRoute(groupID, syncableID, syncableType)+"/patch", patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*GroupSyncable](r) +} + +func (c *Client4) TeamMembersMinusGroupMembers(ctx context.Context, teamID string, groupIDs []string, page, perPage int, etag string) ([]*UserWithGroups, int64, *Response, error) { + groupIDStr := strings.Join(groupIDs, ",") + values := url.Values{} + values.Set("group_ids", groupIDStr) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.teamRoute(teamID)+"/members_minus_group_members?"+values.Encode(), etag) + if err != nil { + return nil, 0, BuildResponse(r), err + } + defer closeBody(r) + + ugc, resp, err := DecodeJSONFromResponse[UsersWithGroupsAndCount](r) + if err != nil { + return nil, 0, nil, err + } + return ugc.Users, ugc.Count, resp, nil +} + +func (c *Client4) ChannelMembersMinusGroupMembers(ctx context.Context, channelID string, groupIDs []string, page, perPage int, etag string) ([]*UserWithGroups, int64, *Response, error) { + groupIDStr := strings.Join(groupIDs, ",") + values := url.Values{} + values.Set("group_ids", groupIDStr) + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := 
c.DoAPIGet(ctx, c.channelRoute(channelID)+"/members_minus_group_members?"+values.Encode(), etag) + if err != nil { + return nil, 0, BuildResponse(r), err + } + defer closeBody(r) + ugc, resp, err := DecodeJSONFromResponse[UsersWithGroupsAndCount](r) + if err != nil { + return nil, 0, nil, err + } + return ugc.Users, ugc.Count, resp, nil +} + +func (c *Client4) PatchConfig(ctx context.Context, config *Config) (*Config, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.configRoute()+"/patch", config) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Config](r) +} + +func (c *Client4) GetChannelModerations(ctx context.Context, channelID string, etag string) ([]*ChannelModeration, *Response, error) { + r, err := c.DoAPIGet(ctx, c.channelRoute(channelID)+"/moderations", etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*ChannelModeration](r) +} + +func (c *Client4) PatchChannelModerations(ctx context.Context, channelID string, patch []*ChannelModerationPatch) ([]*ChannelModeration, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.channelRoute(channelID)+"/moderations/patch", patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*ChannelModeration](r) +} + +func (c *Client4) GetKnownUsers(ctx context.Context) ([]string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.usersRoute()+"/known", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +// PublishUserTyping publishes a user is typing websocket event based on the provided TypingRequest. 
+func (c *Client4) PublishUserTyping(ctx context.Context, userID string, typingRequest TypingRequest) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.publishUserTypingRoute(userID), typingRequest) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) GetChannelMemberCountsByGroup(ctx context.Context, channelID string, includeTimezones bool, etag string) ([]*ChannelMemberCountByGroup, *Response, error) { + values := url.Values{} + values.Set("include_timezones", c.boolString(includeTimezones)) + r, err := c.DoAPIGet(ctx, c.channelRoute(channelID)+"/member_counts_by_group?"+values.Encode(), etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*ChannelMemberCountByGroup](r) +} + +func (c *Client4) RequestTrialLicenseWithExtraFields(ctx context.Context, trialRequest *TrialLicenseRequest) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, "/trial-license", trialRequest) + if err != nil { + return BuildResponse(r), err + } + + defer closeBody(r) + return BuildResponse(r), nil +} + +// RequestTrialLicense will request a trial license and install it in the server +// DEPRECATED - USE RequestTrialLicenseWithExtraFields (this method remains for backwards compatibility) +func (c *Client4) RequestTrialLicense(ctx context.Context, users int) (*Response, error) { + reqData := map[string]any{"users": users, "terms_accepted": true} + r, err := c.DoAPIPostJSON(ctx, "/trial-license", reqData) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// GetGroupStats retrieves stats for a Mattermost Group +func (c *Client4) GetGroupStats(ctx context.Context, groupID string) (*GroupStats, *Response, error) { + r, err := c.DoAPIGet(ctx, c.groupRoute(groupID)+"/stats", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return 
DecodeJSONFromResponse[*GroupStats](r) +} + +func (c *Client4) GetSidebarCategoriesForTeamForUser(ctx context.Context, userID, teamID, etag string) (*OrderedSidebarCategories, *Response, error) { + route := c.userCategoryRoute(userID, teamID) + r, err := c.DoAPIGet(ctx, route, etag) + if err != nil { + return nil, BuildResponse(r), err + } + + return DecodeJSONFromResponse[*OrderedSidebarCategories](r) +} + +func (c *Client4) CreateSidebarCategoryForTeamForUser(ctx context.Context, userID, teamID string, category *SidebarCategoryWithChannels) (*SidebarCategoryWithChannels, *Response, error) { + route := c.userCategoryRoute(userID, teamID) + r, err := c.DoAPIPostJSON(ctx, route, category) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*SidebarCategoryWithChannels](r) +} + +func (c *Client4) UpdateSidebarCategoriesForTeamForUser(ctx context.Context, userID, teamID string, categories []*SidebarCategoryWithChannels) ([]*SidebarCategoryWithChannels, *Response, error) { + route := c.userCategoryRoute(userID, teamID) + + r, err := c.DoAPIPutJSON(ctx, route, categories) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*SidebarCategoryWithChannels](r) +} + +func (c *Client4) GetSidebarCategoryOrderForTeamForUser(ctx context.Context, userID, teamID, etag string) ([]string, *Response, error) { + route := c.userCategoryRoute(userID, teamID) + "/order" + r, err := c.DoAPIGet(ctx, route, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +func (c *Client4) UpdateSidebarCategoryOrderForTeamForUser(ctx context.Context, userID, teamID string, order []string) ([]string, *Response, error) { + route := c.userCategoryRoute(userID, teamID) + "/order" + r, err := c.DoAPIPutJSON(ctx, route, order) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + 
return DecodeJSONFromResponse[[]string](r) +} + +func (c *Client4) GetSidebarCategoryForTeamForUser(ctx context.Context, userID, teamID, categoryID, etag string) (*SidebarCategoryWithChannels, *Response, error) { + route := c.userCategoryRoute(userID, teamID) + "/" + categoryID + r, err := c.DoAPIGet(ctx, route, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*SidebarCategoryWithChannels](r) +} + +func (c *Client4) UpdateSidebarCategoryForTeamForUser(ctx context.Context, userID, teamID, categoryID string, category *SidebarCategoryWithChannels) (*SidebarCategoryWithChannels, *Response, error) { + route := c.userCategoryRoute(userID, teamID) + "/" + categoryID + r, err := c.DoAPIPutJSON(ctx, route, category) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*SidebarCategoryWithChannels](r) +} + +// DeleteSidebarCategoryForTeamForUser deletes a sidebar category for a user in a team. +func (c *Client4) DeleteSidebarCategoryForTeamForUser(ctx context.Context, userId string, teamId string, categoryId string) (*Response, error) { + url := fmt.Sprintf("%s/%s", c.userCategoryRoute(userId, teamId), categoryId) + r, err := c.DoAPIDelete(ctx, url) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// CheckIntegrity performs a database integrity check. 
+func (c *Client4) CheckIntegrity(ctx context.Context) ([]IntegrityCheckResult, *Response, error) { + r, err := c.DoAPIPost(ctx, "/integrity", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]IntegrityCheckResult](r) +} + +func (c *Client4) GetNotices(ctx context.Context, lastViewed int64, teamId string, client NoticeClientType, clientVersion, locale, etag string) (NoticeMessages, *Response, error) { + values := url.Values{} + values.Set("lastViewed", strconv.FormatInt(lastViewed, 10)) + values.Set("client", string(client)) + values.Set("clientVersion", clientVersion) + values.Set("locale", locale) + url := "/system/notices/" + teamId + "?" + values.Encode() + r, err := c.DoAPIGet(ctx, url, etag) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + notices, err := UnmarshalProductNoticeMessages(r.Body) + if err != nil { + return nil, BuildResponse(r), err + } + return notices, BuildResponse(r), nil +} + +func (c *Client4) MarkNoticesViewed(ctx context.Context, ids []string) (*Response, error) { + r, err := c.DoAPIPutJSON(ctx, "/system/notices/view", ids) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) CompleteOnboarding(ctx context.Context, request *CompleteOnboardingRequest) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.systemRoute()+"/onboarding/complete", request) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +// CreateUpload creates a new upload session. 
+func (c *Client4) CreateUpload(ctx context.Context, us *UploadSession) (*UploadSession, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.uploadsRoute(), us) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UploadSession](r) +} + +// GetUpload returns the upload session for the specified uploadId. +func (c *Client4) GetUpload(ctx context.Context, uploadId string) (*UploadSession, *Response, error) { + r, err := c.DoAPIGet(ctx, c.uploadRoute(uploadId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UploadSession](r) +} + +// GetUploadsForUser returns the upload sessions created by the specified +// userId. +func (c *Client4) GetUploadsForUser(ctx context.Context, userId string) ([]*UploadSession, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userRoute(userId)+"/uploads", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*UploadSession](r) +} + +// UploadData performs an upload. On success it returns +// a FileInfo object. 
+func (c *Client4) UploadData(ctx context.Context, uploadId string, data io.Reader) (*FileInfo, *Response, error) { + url := c.uploadRoute(uploadId) + r, err := c.doAPIRequestReader(ctx, http.MethodPost, c.APIURL+url, "", data, nil) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + if r.StatusCode == http.StatusNoContent { + return nil, BuildResponse(r), nil + } + return DecodeJSONFromResponse[*FileInfo](r) +} + +func (c *Client4) UpdatePassword(ctx context.Context, userId, currentPassword, newPassword string) (*Response, error) { + requestBody := map[string]string{"current_password": currentPassword, "new_password": newPassword} + r, err := c.DoAPIPutJSON(ctx, c.userRoute(userId)+"/password", requestBody) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +// Cloud Section + +func (c *Client4) GetCloudProducts(ctx context.Context) ([]*Product, *Response, error) { + r, err := c.DoAPIGet(ctx, c.cloudRoute()+"/products", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Product](r) +} + +func (c *Client4) GetSelfHostedProducts(ctx context.Context) ([]*Product, *Response, error) { + r, err := c.DoAPIGet(ctx, c.cloudRoute()+"/products/selfhosted", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Product](r) +} + +func (c *Client4) GetProductLimits(ctx context.Context) (*ProductLimits, *Response, error) { + r, err := c.DoAPIGet(ctx, c.cloudRoute()+"/limits", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ProductLimits](r) +} + +func (c *Client4) GetIPFilters(ctx context.Context) (*AllowedIPRanges, *Response, error) { + r, err := c.DoAPIGet(ctx, c.ipFiltersRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + + defer closeBody(r) + return 
DecodeJSONFromResponse[*AllowedIPRanges](r) +} + +func (c *Client4) ApplyIPFilters(ctx context.Context, allowedRanges *AllowedIPRanges) (*AllowedIPRanges, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.ipFiltersRoute(), allowedRanges) + if err != nil { + return nil, BuildResponse(r), err + } + + defer closeBody(r) + return DecodeJSONFromResponse[*AllowedIPRanges](r) +} + +func (c *Client4) GetMyIP(ctx context.Context) (*GetIPAddressResponse, *Response, error) { + r, err := c.DoAPIGet(ctx, c.ipFiltersRoute()+"/my_ip", "") + if err != nil { + return nil, BuildResponse(r), err + } + + defer closeBody(r) + return DecodeJSONFromResponse[*GetIPAddressResponse](r) +} + +func (c *Client4) ValidateWorkspaceBusinessEmail(ctx context.Context) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.cloudRoute()+"/validate-workspace-business-email", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +func (c *Client4) NotifyAdmin(ctx context.Context, nr *NotifyAdminToUpgradeRequest) (int, error) { + r, err := c.DoAPIPostJSON(ctx, "/users/notify-admin", nr) + if err != nil { + return r.StatusCode, err + } + + closeBody(r) + + return r.StatusCode, nil +} + +func (c *Client4) TriggerNotifyAdmin(ctx context.Context, nr *NotifyAdminToUpgradeRequest) (int, error) { + r, err := c.DoAPIPostJSON(ctx, "/users/trigger-notify-admin-posts", nr) + if err != nil { + return r.StatusCode, err + } + + closeBody(r) + + return r.StatusCode, nil +} + +func (c *Client4) ValidateBusinessEmail(ctx context.Context, email *ValidateBusinessEmailRequest) (*Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.cloudRoute()+"/validate-business-email", email) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +func (c *Client4) GetCloudCustomer(ctx context.Context) (*CloudCustomer, *Response, error) { + r, err := c.DoAPIGet(ctx, c.cloudRoute()+"/customer", "") + if err != nil { + 
return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*CloudCustomer](r) +} + +func (c *Client4) GetSubscription(ctx context.Context) (*Subscription, *Response, error) { + r, err := c.DoAPIGet(ctx, c.cloudRoute()+"/subscription", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Subscription](r) +} + +func (c *Client4) GetInvoicesForSubscription(ctx context.Context) ([]*Invoice, *Response, error) { + r, err := c.DoAPIGet(ctx, c.cloudRoute()+"/subscription/invoices", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*Invoice](r) +} + +func (c *Client4) UpdateCloudCustomer(ctx context.Context, customerInfo *CloudCustomerInfo) (*CloudCustomer, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.cloudRoute()+"/customer", customerInfo) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*CloudCustomer](r) +} + +func (c *Client4) UpdateCloudCustomerAddress(ctx context.Context, address *Address) (*CloudCustomer, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.cloudRoute()+"/customer/address", address) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*CloudCustomer](r) +} + +func (c *Client4) ListImports(ctx context.Context) ([]string, *Response, error) { + r, err := c.DoAPIGet(ctx, c.importsRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +func (c *Client4) DeleteImport(ctx context.Context, name string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.importRoute(name)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) ListExports(ctx context.Context) ([]string, *Response, error) { + r, err := 
c.DoAPIGet(ctx, c.exportsRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +func (c *Client4) DeleteExport(ctx context.Context, name string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.exportRoute(name)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) DownloadExport(ctx context.Context, name string, wr io.Writer, offset int64) (int64, *Response, error) { + var headers map[string]string + if offset > 0 { + headers = map[string]string{ + HeaderRange: fmt.Sprintf("bytes=%d-", offset), + } + } + r, err := c.DoAPIRequestWithHeaders(ctx, http.MethodGet, c.APIURL+c.exportRoute(name), "", headers) + if err != nil { + return 0, BuildResponse(r), err + } + defer closeBody(r) + n, err := io.Copy(wr, r.Body) + if err != nil { + return n, BuildResponse(r), fmt.Errorf("failed to copy export data to writer: %w", err) + } + return n, BuildResponse(r), nil +} + +func (c *Client4) GeneratePresignedURL(ctx context.Context, name string) (*PresignURLResponse, *Response, error) { + r, err := c.doAPIRequest(ctx, http.MethodPost, c.APIURL+c.exportRoute(name)+"/presign-url", "", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PresignURLResponse](r) +} + +func (c *Client4) GetUserThreads(ctx context.Context, userId, teamId string, options GetUserThreadsOpts) (*Threads, *Response, error) { + v := url.Values{} + if options.Since != 0 { + v.Set("since", fmt.Sprintf("%d", options.Since)) + } + if options.Before != "" { + v.Set("before", options.Before) + } + if options.After != "" { + v.Set("after", options.After) + } + if options.PageSize != 0 { + v.Set("per_page", fmt.Sprintf("%d", options.PageSize)) + } + if options.Extended { + v.Set("extended", "true") + } + if options.Deleted { + v.Set("deleted", "true") + } + if options.Unread { + 
v.Set("unread", "true") + } + if options.ThreadsOnly { + v.Set("threadsOnly", "true") + } + if options.TotalsOnly { + v.Set("totalsOnly", "true") + } + if options.ExcludeDirect { + v.Set("excludeDirect", fmt.Sprintf("%t", options.ExcludeDirect)) + } + url := c.userThreadsRoute(userId, teamId) + if len(v) > 0 { + url += "?" + v.Encode() + } + + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*Threads](r) +} + +func (c *Client4) DownloadComplianceExport(ctx context.Context, jobId string, wr io.Writer) (string, error) { + r, err := c.DoAPIGet(ctx, c.jobsRoute()+fmt.Sprintf("/%s/download", jobId), "") + if err != nil { + return "", err + } + defer closeBody(r) + + // Try to get the filename from the Content-Disposition header + var filename string + if cd := r.Header.Get("Content-Disposition"); cd != "" { + var params map[string]string + if _, params, err = mime.ParseMediaType(cd); err == nil { + if params["filename"] != "" { + filename = params["filename"] + } + } + } + + _, err = io.Copy(wr, r.Body) + if err != nil { + return filename, fmt.Errorf("failed to copy compliance export data to writer: %w", err) + } + return filename, nil +} + +func (c *Client4) GetUserThread(ctx context.Context, userId, teamId, threadId string, extended bool) (*ThreadResponse, *Response, error) { + url := c.userThreadRoute(userId, teamId, threadId) + if extended { + url += "?extended=true" + } + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ThreadResponse](r) +} + +func (c *Client4) UpdateThreadsReadForUser(ctx context.Context, userId, teamId string) (*Response, error) { + r, err := c.DoAPIPut(ctx, fmt.Sprintf("%s/read", c.userThreadsRoute(userId, teamId)), "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +func (c *Client4) 
SetThreadUnreadByPostId(ctx context.Context, userId, teamId, threadId, postId string) (*ThreadResponse, *Response, error) { + r, err := c.DoAPIPost(ctx, fmt.Sprintf("%s/set_unread/%s", c.userThreadRoute(userId, teamId, threadId), postId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ThreadResponse](r) +} + +func (c *Client4) UpdateThreadReadForUser(ctx context.Context, userId, teamId, threadId string, timestamp int64) (*ThreadResponse, *Response, error) { + r, err := c.DoAPIPut(ctx, fmt.Sprintf("%s/read/%d", c.userThreadRoute(userId, teamId, threadId), timestamp), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ThreadResponse](r) +} + +func (c *Client4) UpdateThreadFollowForUser(ctx context.Context, userId, teamId, threadId string, state bool) (*Response, error) { + var err error + var r *http.Response + if state { + r, err = c.DoAPIPut(ctx, c.userThreadRoute(userId, teamId, threadId)+"/following", "") + } else { + r, err = c.DoAPIDelete(ctx, c.userThreadRoute(userId, teamId, threadId)+"/following") + } + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +func (c *Client4) GetAllSharedChannels(ctx context.Context, teamID string, page, perPage int) ([]*SharedChannel, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + url := c.sharedChannelsRoute() + "/" + teamID + "?" 
+ values.Encode() + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*SharedChannel](r) +} + +func (c *Client4) GetRemoteClusterInfo(ctx context.Context, remoteID string) (RemoteClusterInfo, *Response, error) { + url := fmt.Sprintf("%s/remote_info/%s", c.sharedChannelsRoute(), remoteID) + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return RemoteClusterInfo{}, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[RemoteClusterInfo](r) +} + +func (c *Client4) GetRemoteClusters(ctx context.Context, page, perPage int, filter RemoteClusterQueryFilter) ([]*RemoteCluster, *Response, error) { + v := url.Values{} + if page != 0 { + v.Set("page", fmt.Sprintf("%d", page)) + } + if perPage != 0 { + v.Set("per_page", fmt.Sprintf("%d", perPage)) + } + if filter.ExcludeOffline { + v.Set("exclude_offline", "true") + } + if filter.InChannel != "" { + v.Set("in_channel", filter.InChannel) + } + if filter.NotInChannel != "" { + v.Set("not_in_channel", filter.NotInChannel) + } + if filter.Topic != "" { + v.Set("topic", filter.Topic) + } + if filter.CreatorId != "" { + v.Set("creator_id", filter.CreatorId) + } + if filter.OnlyConfirmed { + v.Set("only_confirmed", "true") + } + if filter.PluginID != "" { + v.Set("plugin_id", filter.PluginID) + } + if filter.OnlyPlugins { + v.Set("only_plugins", "true") + } + if filter.ExcludePlugins { + v.Set("exclude_plugins", "true") + } + if filter.IncludeDeleted { + v.Set("include_deleted", "true") + } + url := c.remoteClusterRoute() + if len(v) > 0 { + url += "?" 
+ v.Encode() + } + + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*RemoteCluster](r) +} + +func (c *Client4) CreateRemoteCluster(ctx context.Context, rcWithPassword *RemoteClusterWithPassword) (*RemoteClusterWithInvite, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.remoteClusterRoute(), rcWithPassword) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*RemoteClusterWithInvite](r) +} + +func (c *Client4) RemoteClusterAcceptInvite(ctx context.Context, rcAcceptInvite *RemoteClusterAcceptInvite) (*RemoteCluster, *Response, error) { + url := fmt.Sprintf("%s/accept_invite", c.remoteClusterRoute()) + r, err := c.DoAPIPostJSON(ctx, url, rcAcceptInvite) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*RemoteCluster](r) +} + +func (c *Client4) GenerateRemoteClusterInvite(ctx context.Context, remoteClusterId, password string) (string, *Response, error) { + url := fmt.Sprintf("%s/%s/generate_invite", c.remoteClusterRoute(), remoteClusterId) + r, err := c.DoAPIPostJSON(ctx, url, map[string]string{"password": password}) + if err != nil { + return "", BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[string](r) +} + +func (c *Client4) GetRemoteCluster(ctx context.Context, remoteClusterId string) (*RemoteCluster, *Response, error) { + r, err := c.DoAPIGet(ctx, fmt.Sprintf("%s/%s", c.remoteClusterRoute(), remoteClusterId), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*RemoteCluster](r) +} + +func (c *Client4) PatchRemoteCluster(ctx context.Context, remoteClusterId string, patch *RemoteClusterPatch) (*RemoteCluster, *Response, error) { + url := fmt.Sprintf("%s/%s", c.remoteClusterRoute(), remoteClusterId) + r, err := c.DoAPIPatchJSON(ctx, url, patch) + 
if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*RemoteCluster](r) +} + +func (c *Client4) DeleteRemoteCluster(ctx context.Context, remoteClusterId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, fmt.Sprintf("%s/%s", c.remoteClusterRoute(), remoteClusterId)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) GetSharedChannelRemotesByRemoteCluster(ctx context.Context, remoteId string, filter SharedChannelRemoteFilterOpts, page, perPage int) ([]*SharedChannelRemote, *Response, error) { + v := url.Values{} + if filter.IncludeUnconfirmed { + v.Set("include_unconfirmed", "true") + } + if filter.ExcludeConfirmed { + v.Set("exclude_confirmed", "true") + } + if filter.ExcludeHome { + v.Set("exclude_home", "true") + } + if filter.ExcludeRemote { + v.Set("exclude_remote", "true") + } + if filter.IncludeDeleted { + v.Set("include_deleted", "true") + } + if page != 0 { + v.Set("page", fmt.Sprintf("%d", page)) + } + if perPage != 0 { + v.Set("per_page", fmt.Sprintf("%d", perPage)) + } + url := c.sharedChannelRemotesRoute(remoteId) + if len(v) > 0 { + url += "?" 
+ v.Encode() + } + + r, err := c.DoAPIGet(ctx, url, "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*SharedChannelRemote](r) +} + +func (c *Client4) InviteRemoteClusterToChannel(ctx context.Context, remoteId, channelId string) (*Response, error) { + url := fmt.Sprintf("%s/invite", c.channelRemoteRoute(remoteId, channelId)) + r, err := c.DoAPIPost(ctx, url, "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) UninviteRemoteClusterToChannel(ctx context.Context, remoteId, channelId string) (*Response, error) { + url := fmt.Sprintf("%s/uninvite", c.channelRemoteRoute(remoteId, channelId)) + r, err := c.DoAPIPost(ctx, url, "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) GetAncillaryPermissions(ctx context.Context, subsectionPermissions []string) ([]string, *Response, error) { + var returnedPermissions []string + url := fmt.Sprintf("%s/ancillary", c.permissionsRoute()) + r, err := c.DoAPIPostJSON(ctx, url, subsectionPermissions) + if err != nil { + return returnedPermissions, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]string](r) +} + +func (c *Client4) GetUsersWithInvalidEmails(ctx context.Context, page, perPage int) ([]*User, *Response, error) { + values := url.Values{} + values.Set("page", strconv.Itoa(page)) + values.Set("per_page", strconv.Itoa(perPage)) + r, err := c.DoAPIGet(ctx, c.usersRoute()+"/invalid_emails?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*User](r) +} + +func (c *Client4) GetAppliedSchemaMigrations(ctx context.Context) ([]AppliedMigration, *Response, error) { + r, err := c.DoAPIGet(ctx, c.systemRoute()+"/schema/version", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer 
closeBody(r) + return DecodeJSONFromResponse[[]AppliedMigration](r) +} + +// Usage Section + +// GetPostsUsage returns rounded off total usage of posts for the instance +func (c *Client4) GetPostsUsage(ctx context.Context) (*PostsUsage, *Response, error) { + r, err := c.DoAPIGet(ctx, c.usageRoute()+"/posts", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PostsUsage](r) +} + +// GetStorageUsage returns the file storage usage for the instance, +// rounded down the most signigicant digit +func (c *Client4) GetStorageUsage(ctx context.Context) (*StorageUsage, *Response, error) { + r, err := c.DoAPIGet(ctx, c.usageRoute()+"/storage", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*StorageUsage](r) +} + +// GetTeamsUsage returns total usage of teams for the instance +func (c *Client4) GetTeamsUsage(ctx context.Context) (*TeamsUsage, *Response, error) { + r, err := c.DoAPIGet(ctx, c.usageRoute()+"/teams", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*TeamsUsage](r) +} + +func (c *Client4) GetPostInfo(ctx context.Context, postId string) (*PostInfo, *Response, error) { + r, err := c.DoAPIGet(ctx, c.postRoute(postId)+"/info", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PostInfo](r) +} + +func (c *Client4) AcknowledgePost(ctx context.Context, postId, userId string) (*PostAcknowledgement, *Response, error) { + r, err := c.DoAPIPost(ctx, c.userRoute(userId)+c.postRoute(postId)+"/ack", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PostAcknowledgement](r) +} + +func (c *Client4) UnacknowledgePost(ctx context.Context, postId, userId string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.userRoute(userId)+c.postRoute(postId)+"/ack") 
+ if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) AddUserToGroupSyncables(ctx context.Context, userID string) (*Response, error) { + r, err := c.DoAPIPost(ctx, c.ldapRoute()+"/users/"+userID+"/group_sync_memberships", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) CheckCWSConnection(ctx context.Context, userId string) (*Response, error) { + r, err := c.DoAPIGet(ctx, c.cloudRoute()+"/healthz", "") + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +// CreateChannelBookmark creates a channel bookmark based on the provided struct. +func (c *Client4) CreateChannelBookmark(ctx context.Context, channelBookmark *ChannelBookmark) (*ChannelBookmarkWithFileInfo, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.bookmarksRoute(channelBookmark.ChannelId), channelBookmark) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelBookmarkWithFileInfo](r) +} + +// UpdateChannelBookmark updates a channel bookmark based on the provided struct. +func (c *Client4) UpdateChannelBookmark(ctx context.Context, channelId, bookmarkId string, patch *ChannelBookmarkPatch) (*UpdateChannelBookmarkResponse, *Response, error) { + r, err := c.DoAPIPatchJSON(ctx, c.bookmarkRoute(channelId, bookmarkId), patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UpdateChannelBookmarkResponse](r) +} + +// UpdateChannelBookmarkSortOrder updates a channel bookmark's sort order based on the provided new index. 
+func (c *Client4) UpdateChannelBookmarkSortOrder(ctx context.Context, channelId, bookmarkId string, sortOrder int64) ([]*ChannelBookmarkWithFileInfo, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.bookmarkRoute(channelId, bookmarkId)+"/sort_order", sortOrder) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*ChannelBookmarkWithFileInfo](r) +} + +// DeleteChannelBookmark deletes a channel bookmark. +func (c *Client4) DeleteChannelBookmark(ctx context.Context, channelId, bookmarkId string) (*ChannelBookmarkWithFileInfo, *Response, error) { + r, err := c.DoAPIDelete(ctx, c.bookmarkRoute(channelId, bookmarkId)) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelBookmarkWithFileInfo](r) +} + +func (c *Client4) ListChannelBookmarksForChannel(ctx context.Context, channelId string, since int64) ([]*ChannelBookmarkWithFileInfo, *Response, error) { + values := url.Values{} + values.Set("bookmarks_since", strconv.FormatInt(since, 10)) + r, err := c.DoAPIGet(ctx, c.bookmarksRoute(channelId)+"?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*ChannelBookmarkWithFileInfo](r) +} + +func (c *Client4) SubmitClientMetrics(ctx context.Context, report *PerformanceReport) (*Response, error) { + res, err := c.DoAPIPostJSON(ctx, c.clientPerfMetricsRoute(), report) + if err != nil { + return BuildResponse(res), err + } + + return BuildResponse(res), nil +} + +func (c *Client4) GetFilteredUsersStats(ctx context.Context, options *UserCountOptions) (*UsersStats, *Response, error) { + v := url.Values{} + v.Set("in_team", options.TeamId) + v.Set("in_channel", options.ChannelId) + v.Set("include_deleted", strconv.FormatBool(options.IncludeDeleted)) + v.Set("include_bots", strconv.FormatBool(options.IncludeBotAccounts)) + v.Set("include_remote_users", 
strconv.FormatBool(options.IncludeRemoteUsers)) + + if len(options.Roles) > 0 { + v.Set("roles", strings.Join(options.Roles, ",")) + } + if len(options.ChannelRoles) > 0 { + v.Set("channel_roles", strings.Join(options.ChannelRoles, ",")) + } + if len(options.TeamRoles) > 0 { + v.Set("team_roles", strings.Join(options.TeamRoles, ",")) + } + + r, err := c.DoAPIGet(ctx, c.usersRoute()+"/stats/filtered?"+v.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*UsersStats](r) +} + +func (c *Client4) RestorePostVersion(ctx context.Context, postId, versionId string) (*Post, *Response, error) { + r, err := c.DoAPIPost(ctx, c.postRoute(postId)+"/restore/"+versionId, "") + if err != nil { + return nil, BuildResponse(r), err + } + + defer closeBody(r) + return DecodeJSONFromResponse[*Post](r) +} + +func (c *Client4) CreateCPAField(ctx context.Context, field *PropertyField) (*PropertyField, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.customProfileAttributeFieldsRoute(), field) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PropertyField](r) +} + +func (c *Client4) ListCPAFields(ctx context.Context) ([]*PropertyField, *Response, error) { + r, err := c.DoAPIGet(ctx, c.customProfileAttributeFieldsRoute(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]*PropertyField](r) +} + +func (c *Client4) PatchCPAField(ctx context.Context, fieldID string, patch *PropertyFieldPatch) (*PropertyField, *Response, error) { + r, err := c.DoAPIPatchJSON(ctx, c.customProfileAttributeFieldRoute(fieldID), patch) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*PropertyField](r) +} + +func (c *Client4) DeleteCPAField(ctx context.Context, fieldID string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, 
c.customProfileAttributeFieldRoute(fieldID)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + return BuildResponse(r), nil +} + +func (c *Client4) ListCPAValues(ctx context.Context, userID string) (map[string]json.RawMessage, *Response, error) { + r, err := c.DoAPIGet(ctx, c.userCustomProfileAttributesRoute(userID), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]json.RawMessage](r) +} + +func (c *Client4) PatchCPAValues(ctx context.Context, values map[string]json.RawMessage) (map[string]json.RawMessage, *Response, error) { + r, err := c.DoAPIPatchJSON(ctx, c.customProfileAttributeValuesRoute(), values) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]json.RawMessage](r) +} + +func (c *Client4) PatchCPAValuesForUser(ctx context.Context, userID string, values map[string]json.RawMessage) (map[string]json.RawMessage, *Response, error) { + r, err := c.DoAPIPatchJSON(ctx, c.userCustomProfileAttributesRoute(userID), values) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[map[string]json.RawMessage](r) +} + +func (c *Client4) GetPostPropertyValues(ctx context.Context, postId string) ([]PropertyValue, *Response, error) { + r, err := c.DoAPIGet(ctx, c.contentFlaggingRoute()+"/post/"+postId+"/field_values", "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]PropertyValue](r) // TODO: Fix! +} + +// Access Control Policies Section + +// CreateAccessControlPolicy creates a new access control policy. 
+func (c *Client4) CreateAccessControlPolicy(ctx context.Context, policy *AccessControlPolicy) (*AccessControlPolicy, *Response, error) { + r, err := c.DoAPIPutJSON(ctx, c.accessControlPoliciesRoute(), policy) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*AccessControlPolicy](r) +} + +func (c *Client4) GetAccessControlPolicy(ctx context.Context, id string) (*AccessControlPolicy, *Response, error) { + r, err := c.DoAPIGet(ctx, c.accessControlPolicyRoute(id), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*AccessControlPolicy](r) +} + +func (c *Client4) DeleteAccessControlPolicy(ctx context.Context, id string) (*Response, error) { + r, err := c.DoAPIDelete(ctx, c.accessControlPolicyRoute(id)) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +func (c *Client4) CheckExpression(ctx context.Context, expression string, channelId ...string) ([]CELExpressionError, *Response, error) { + checkExpressionRequest := struct { + Expression string `json:"expression"` + ChannelId string `json:"channelId,omitempty"` + }{ + Expression: expression, + } + if len(channelId) > 0 && channelId[0] != "" { + checkExpressionRequest.ChannelId = channelId[0] + } + r, err := c.DoAPIPostJSON(ctx, c.celRoute()+"/check", checkExpressionRequest) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[[]CELExpressionError](r) +} + +func (c *Client4) TestExpression(ctx context.Context, params QueryExpressionParams) (*AccessControlPolicyTestResponse, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.celRoute()+"/test", params) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*AccessControlPolicyTestResponse](r) +} + +func (c *Client4) SearchAccessControlPolicies(ctx context.Context, options 
AccessControlPolicySearch) (*AccessControlPoliciesWithCount, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.accessControlPoliciesRoute()+"/search", options) + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*AccessControlPoliciesWithCount](r) +} + +func (c *Client4) AssignAccessControlPolicies(ctx context.Context, policyID string, resourceIDs []string) (*Response, error) { + var assignments struct { + ChannelIds []string `json:"channel_ids"` + } + assignments.ChannelIds = resourceIDs + + r, err := c.DoAPIPostJSON(ctx, c.accessControlPolicyRoute(policyID)+"/assign", assignments) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +func (c *Client4) UnassignAccessControlPolicies(ctx context.Context, policyID string, resourceIDs []string) (*Response, error) { + var unassignments struct { + ChannelIds []string `json:"channel_ids"` + } + unassignments.ChannelIds = resourceIDs + + r, err := c.DoAPIDeleteJSON(ctx, c.accessControlPolicyRoute(policyID)+"/unassign", unassignments) + if err != nil { + return BuildResponse(r), err + } + defer closeBody(r) + + return BuildResponse(r), nil +} + +func (c *Client4) GetChannelsForAccessControlPolicy(ctx context.Context, policyID string, after string, limit int) (*ChannelsWithCount, *Response, error) { + values := url.Values{} + values.Set("after", after) + values.Set("limit", strconv.Itoa(limit)) + r, err := c.DoAPIGet(ctx, c.accessControlPolicyRoute(policyID)+"/resources/channels?"+values.Encode(), "") + if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelsWithCount](r) +} + +func (c *Client4) SearchChannelsForAccessControlPolicy(ctx context.Context, policyID string, options ChannelSearch) (*ChannelsWithCount, *Response, error) { + r, err := c.DoAPIPostJSON(ctx, c.accessControlPolicyRoute(policyID)+"/resources/channels/search", options) + 
if err != nil { + return nil, BuildResponse(r), err + } + defer closeBody(r) + return DecodeJSONFromResponse[*ChannelsWithCount](r) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/cloud.go b/vendor/github.com/mattermost/mattermost/server/public/model/cloud.go new file mode 100644 index 00000000..a2da17c4 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/cloud.go @@ -0,0 +1,385 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "strings" + "time" +) + +const ( + EventTypeFailedPayment = "failed-payment" + EventTypeFailedPaymentNoCard = "failed-payment-no-card" + EventTypeSendAdminWelcomeEmail = "send-admin-welcome-email" + EventTypeSendUpgradeConfirmationEmail = "send-upgrade-confirmation-email" + EventTypeSubscriptionChanged = "subscription-changed" + EventTypeTriggerDelinquencyEmail = "trigger-delinquency-email" +) + +const UpcomingInvoice = "upcoming" + +var MockCWS string + +type BillingScheme string + +const ( + BillingSchemePerSeat = BillingScheme("per_seat") + BillingSchemeFlatFee = BillingScheme("flat_fee") + BillingSchemeSalesServe = BillingScheme("sales_serve") +) + +type BillingType string + +const ( + BillingTypeLicensed = BillingType("licensed") + BillingTypeInternal = BillingType("internal") +) + +type RecurringInterval string + +const ( + RecurringIntervalYearly = RecurringInterval("year") + RecurringIntervalMonthly = RecurringInterval("month") +) + +type SubscriptionFamily string + +const ( + SubscriptionFamilyCloud = SubscriptionFamily("cloud") + SubscriptionFamilyOnPrem = SubscriptionFamily("on-prem") +) + +type ProductSku string + +const ( + SkuStarterGov = ProductSku("starter-gov") + SkuProfessionalGov = ProductSku("professional-gov") + SkuEnterpriseGov = ProductSku("enterprise-gov") + SkuStarter = ProductSku("starter") + SkuProfessional = ProductSku("professional") + 
SkuEnterprise = ProductSku("enterprise") + SkuCloudStarter = ProductSku("cloud-starter") + SkuCloudProfessional = ProductSku("cloud-professional") + SkuCloudEnterprise = ProductSku("cloud-enterprise") +) + +// Product model represents a product on the cloud system. +type Product struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + PricePerSeat float64 `json:"price_per_seat"` + AddOns []*AddOn `json:"add_ons"` + SKU string `json:"sku"` + PriceID string `json:"price_id"` + Family SubscriptionFamily `json:"product_family"` + RecurringInterval RecurringInterval `json:"recurring_interval"` + BillingScheme BillingScheme `json:"billing_scheme"` + CrossSellsTo string `json:"cross_sells_to"` +} + +type UserFacingProduct struct { + ID string `json:"id"` + Name string `json:"name"` + SKU string `json:"sku"` + PricePerSeat float64 `json:"price_per_seat"` + RecurringInterval RecurringInterval `json:"recurring_interval"` + CrossSellsTo string `json:"cross_sells_to"` +} + +// AddOn represents an addon to a product. +type AddOn struct { + ID string `json:"id"` + Name string `json:"name"` + DisplayName string `json:"display_name"` + PricePerSeat float64 `json:"price_per_seat"` +} + +// StripeSetupIntent represents the SetupIntent model from Stripe for updating payment methods. +type StripeSetupIntent struct { + ID string `json:"id"` + ClientSecret string `json:"client_secret"` +} + +// ConfirmPaymentMethodRequest contains the fields for the customer payment update API. +type ConfirmPaymentMethodRequest struct { + StripeSetupIntentID string `json:"stripe_setup_intent_id"` + SubscriptionID string `json:"subscription_id"` +} + +// Customer model represents a customer on the system. 
+type CloudCustomer struct { + CloudCustomerInfo + ID string `json:"id"` + CreatorID string `json:"creator_id"` + CreateAt int64 `json:"create_at"` + BillingAddress *Address `json:"billing_address"` + CompanyAddress *Address `json:"company_address"` + PaymentMethod *PaymentMethod `json:"payment_method"` +} + +type StartCloudTrialRequest struct { + Email string `json:"email"` + SubscriptionID string `json:"subscription_id"` +} + +type ValidateBusinessEmailRequest struct { + Email string `json:"email"` +} + +type ValidateBusinessEmailResponse struct { + IsValid bool `json:"is_valid"` +} + +type SubscriptionLicenseSelfServeStatusResponse struct { + IsExpandable bool `json:"is_expandable"` + IsRenewable bool `json:"is_renewable"` +} + +// CloudCustomerInfo represents editable info of a customer. +type CloudCustomerInfo struct { + Name string `json:"name"` + Email string `json:"email,omitempty"` + ContactFirstName string `json:"contact_first_name,omitempty"` + ContactLastName string `json:"contact_last_name,omitempty"` + NumEmployees int `json:"num_employees"` + CloudAltPaymentMethod string `json:"monthly_subscription_alt_payment_method"` +} + +// Address model represents a customer's address. +type Address struct { + City string `json:"city"` + Country string `json:"country"` + Line1 string `json:"line1"` + Line2 string `json:"line2"` + PostalCode string `json:"postal_code"` + State string `json:"state"` +} + +// PaymentMethod represents methods of payment for a customer. +type PaymentMethod struct { + Type string `json:"type"` + LastFour string `json:"last_four"` + ExpMonth int `json:"exp_month"` + ExpYear int `json:"exp_year"` + CardBrand string `json:"card_brand"` + Name string `json:"name"` +} + +// Subscription model represents a subscription on the system. 
+type Subscription struct { + ID string `json:"id"` + CustomerID string `json:"customer_id"` + ProductID string `json:"product_id"` + AddOns []string `json:"add_ons"` + StartAt int64 `json:"start_at"` + EndAt int64 `json:"end_at"` + CreateAt int64 `json:"create_at"` + Seats int `json:"seats"` + Status string `json:"status"` + DNS string `json:"dns"` + LastInvoice *Invoice `json:"last_invoice"` + UpcomingInvoice *Invoice `json:"upcoming_invoice"` + IsFreeTrial string `json:"is_free_trial"` + TrialEndAt int64 `json:"trial_end_at"` + DelinquentSince *int64 `json:"delinquent_since"` + OriginallyLicensedSeats int `json:"originally_licensed_seats"` + ComplianceBlocked string `json:"compliance_blocked"` + BillingType string `json:"billing_type"` + CancelAt *int64 `json:"cancel_at"` + WillRenew string `json:"will_renew"` + SimulatedCurrentTimeMs *int64 `json:"simulated_current_time_ms"` + IsCloudPreview bool `json:"is_cloud_preview"` +} + +func (s *Subscription) DaysToExpiration() int64 { + now := time.Now().UnixMilli() + if GetServiceEnvironment() == ServiceEnvironmentTest { + // In the test environment we have test clocks. A test clock is a ms timestamp + // If it's not nil, we use it as the current time in all calculations + if s.SimulatedCurrentTimeMs != nil { + now = *s.SimulatedCurrentTimeMs + } + } + daysToExpiry := (s.EndAt - now) / (1000 * 60 * 60 * 24) + return daysToExpiry +} + +// Subscription History model represents true up event in a yearly subscription +type SubscriptionHistory struct { + ID string `json:"id"` + SubscriptionID string `json:"subscription_id"` + Seats int `json:"seats"` + CreateAt int64 `json:"create_at"` +} + +type SubscriptionHistoryChange struct { + SubscriptionID string `json:"subscription_id"` + Seats int `json:"seats"` + CreateAt int64 `json:"create_at"` +} + +// GetWorkSpaceNameFromDNS returns the work space name. 
For example from test.mattermost.cloud.com, it returns test +func (s *Subscription) GetWorkSpaceNameFromDNS() string { + return strings.Split(s.DNS, ".")[0] +} + +// Invoice model represents a cloud invoice +type Invoice struct { + ID string `json:"id"` + Number string `json:"number"` + CreateAt int64 `json:"create_at"` + Total int64 `json:"total"` + Tax int64 `json:"tax"` + Status string `json:"status"` + Description string `json:"description"` + PeriodStart int64 `json:"period_start"` + PeriodEnd int64 `json:"period_end"` + SubscriptionID string `json:"subscription_id"` + Items []*InvoiceLineItem `json:"line_items"` + CurrentProductName string `json:"current_product_name"` +} + +// InvoiceLineItem model represents a cloud invoice lineitem tied to an invoice. +type InvoiceLineItem struct { + PriceID string `json:"price_id"` + Total int64 `json:"total"` + Quantity float64 `json:"quantity"` + PricePerUnit int64 `json:"price_per_unit"` + Description string `json:"description"` + Type string `json:"type"` + Metadata map[string]any `json:"metadata"` + PeriodStart int64 `json:"period_start"` + PeriodEnd int64 `json:"period_end"` +} + +type DelinquencyEmailTrigger struct { + EmailToTrigger string `json:"email_to_send"` +} + +type DelinquencyEmail string + +const ( + DelinquencyEmail7 DelinquencyEmail = "7" + DelinquencyEmail14 DelinquencyEmail = "14" + DelinquencyEmail30 DelinquencyEmail = "30" + DelinquencyEmail45 DelinquencyEmail = "45" + DelinquencyEmail60 DelinquencyEmail = "60" + DelinquencyEmail75 DelinquencyEmail = "75" + DelinquencyEmail90 DelinquencyEmail = "90" +) + +type CWSWebhookPayload struct { + Event string `json:"event"` + FailedPayment *FailedPayment `json:"failed_payment"` + CloudWorkspaceOwner *CloudWorkspaceOwner `json:"cloud_workspace_owner"` + ProductLimits *ProductLimits `json:"product_limits"` + Subscription *Subscription `json:"subscription"` + SubscriptionTrialEndUnixTimeStamp int64 `json:"trial_end_time_stamp"` + DelinquencyEmail 
*DelinquencyEmailTrigger `json:"delinquency_email"` +} + +type FailedPayment struct { + CardBrand string `json:"card_brand"` + LastFour string `json:"last_four"` + FailureMessage string `json:"failure_message"` +} + +// CloudWorkspaceOwner is part of the CWS Webhook payload that contains information about the user that created the workspace from the CWS +type CloudWorkspaceOwner struct { + UserName string `json:"username"` +} + +type SubscriptionChange struct { + ProductID string `json:"product_id"` + Seats int `json:"seats"` + Feedback *Feedback `json:"downgrade_feedback"` + ShippingAddress *Address `json:"shipping_address"` + Customer *CloudCustomerInfo `json:"customer"` +} + +type FilesLimits struct { + TotalStorage *int64 `json:"total_storage"` +} + +type MessagesLimits struct { + History *int `json:"history"` +} + +type TeamsLimits struct { + Active *int `json:"active"` +} + +type ProductLimits struct { + Files *FilesLimits `json:"files,omitempty"` + Messages *MessagesLimits `json:"messages,omitempty"` + Teams *TeamsLimits `json:"teams,omitempty"` +} + +// CreateSubscriptionRequest is the parameters for the API request to create a subscription. 
+type CreateSubscriptionRequest struct { + ProductID string `json:"product_id"` + AddOns []string `json:"add_ons"` + Seats int `json:"seats"` + Total float64 `json:"total"` + InternalPurchaseOrder string `json:"internal_purchase_order"` + DiscountID string `json:"discount_id"` +} + +type Installation struct { + ID string `json:"id"` + State string `json:"state"` + AllowedIPRanges *AllowedIPRanges `json:"allowed_ip_ranges"` +} + +type Feedback struct { + Reason string `json:"reason"` + Comments string `json:"comments"` +} + +type WorkspaceDeletionRequest struct { + SubscriptionID string `json:"subscription_id"` + Feedback *Feedback `json:"delete_feedback"` +} + +// MessageDescriptor represents an i18n message descriptor +type MessageDescriptor struct { + ID string `json:"id"` + DefaultMessage string `json:"defaultMessage"` + Values map[string]any `json:"values,omitempty"` +} + +// PreviewModalContentData represents the structure of modal content data from S3 +type PreviewModalContentData struct { + SKULabel MessageDescriptor `json:"skuLabel"` + Title MessageDescriptor `json:"title"` + Subtitle MessageDescriptor `json:"subtitle"` + VideoURL string `json:"videoUrl"` + VideoPoster string `json:"videoPoster,omitempty"` + UseCase string `json:"useCase"` +} + +func (p *Product) IsYearly() bool { + return p.RecurringInterval == RecurringIntervalYearly +} + +func (p *Product) IsMonthly() bool { + return p.RecurringInterval == RecurringIntervalMonthly +} + +func (df *Feedback) ToMap() map[string]any { + var res map[string]any + feedback, err := json.Marshal(df) + if err != nil { + return res + } + + err = json.Unmarshal(feedback, &res) + if err != nil { + return res + } + + return res +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/cluster_discovery.go b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_discovery.go new file mode 100644 index 00000000..90052a64 --- /dev/null +++ 
b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_discovery.go @@ -0,0 +1,115 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" + "os" +) + +const ( + CDSOfflineAfterMillis = 1000 * 60 * 30 // 30 minutes + CDSTypeApp = "mattermost_app" +) + +type ClusterDiscovery struct { + Id string `json:"id"` + Type string `json:"type"` + ClusterName string `json:"cluster_name"` + Hostname string `json:"hostname"` + GossipPort int32 `json:"gossip_port"` + Port int32 `json:"port"` // Deperacted: Port is unused. It's only kept for backwards compatibility. + CreateAt int64 `json:"create_at"` + LastPingAt int64 `json:"last_ping_at"` +} + +func (o *ClusterDiscovery) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + if o.CreateAt == 0 { + o.CreateAt = GetMillis() + o.LastPingAt = o.CreateAt + } +} + +func (o *ClusterDiscovery) AutoFillHostname() { + // attempt to set the hostname from the OS + if o.Hostname == "" { + if hn, err := os.Hostname(); err == nil { + o.Hostname = hn + } + } +} + +func (o *ClusterDiscovery) AutoFillIPAddress(iface string, ipAddress string) { + // attempt to set the hostname to the first non-local IP address + if o.Hostname == "" { + if ipAddress != "" { + o.Hostname = ipAddress + } else { + o.Hostname = GetServerIPAddress(iface) + } + } +} + +func (o *ClusterDiscovery) IsEqual(in *ClusterDiscovery) bool { + if in == nil { + return false + } + + if o.Type != in.Type { + return false + } + + if o.ClusterName != in.ClusterName { + return false + } + + if o.Hostname != in.Hostname { + return false + } + + return true +} + +func FilterClusterDiscovery(vs []*ClusterDiscovery, f func(*ClusterDiscovery) bool) []*ClusterDiscovery { + cdCopy := make([]*ClusterDiscovery, 0) + for _, v := range vs { + if f(v) { + cdCopy = append(cdCopy, v) + } + } + + return cdCopy +} + +func (o *ClusterDiscovery) IsValid() *AppError { + if 
!IsValidId(o.Id) { + return NewAppError("ClusterDiscovery.IsValid", "model.cluster.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if o.ClusterName == "" { + return NewAppError("ClusterDiscovery.IsValid", "model.cluster.is_valid.name.app_error", nil, "", http.StatusBadRequest) + } + + if o.Type == "" { + return NewAppError("ClusterDiscovery.IsValid", "model.cluster.is_valid.type.app_error", nil, "", http.StatusBadRequest) + } + + if o.Hostname == "" { + return NewAppError("ClusterDiscovery.IsValid", "model.cluster.is_valid.hostname.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("ClusterDiscovery.IsValid", "model.cluster.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + + if o.LastPingAt == 0 { + return NewAppError("ClusterDiscovery.IsValid", "model.cluster.is_valid.last_ping_at.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/cluster_info.go b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_info.go new file mode 100644 index 00000000..e69401bc --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_info.go @@ -0,0 +1,13 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type ClusterInfo struct { + Id string `json:"id"` + Version string `json:"version"` + SchemaVersion string `json:"schema_version"` + ConfigHash string `json:"config_hash"` + IPAddress string `json:"ipaddress"` + Hostname string `json:"hostname"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/cluster_message.go b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_message.go new file mode 100644 index 00000000..faf0625c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_message.go @@ -0,0 +1,76 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type ClusterEvent string + +const ( + ClusterEventNone ClusterEvent = "none" + ClusterEventPublish ClusterEvent = "publish" + ClusterEventUpdateStatus ClusterEvent = "update_status" + ClusterEventInvalidateAllCaches ClusterEvent = "inv_all_caches" + ClusterEventInvalidateCacheForReactions ClusterEvent = "inv_reactions" + ClusterEventInvalidateCacheForChannelMembersNotifyProps ClusterEvent = "inv_channel_members_notify_props" + ClusterEventInvalidateCacheForChannelByName ClusterEvent = "inv_channel_name" + ClusterEventInvalidateCacheForChannel ClusterEvent = "inv_channel" + ClusterEventInvalidateCacheForChannelGuestCount ClusterEvent = "inv_channel_guest_count" + ClusterEventInvalidateCacheForUser ClusterEvent = "inv_user" + ClusterEventInvalidateWebConnCacheForUser ClusterEvent = "inv_user_teams" + ClusterEventClearSessionCacheForUser ClusterEvent = "clear_session_user" + ClusterEventInvalidateCacheForRoles ClusterEvent = "inv_roles" + ClusterEventInvalidateCacheForRolePermissions ClusterEvent = "inv_role_permissions" + ClusterEventInvalidateCacheForProfileByIds ClusterEvent = "inv_profile_ids" + ClusterEventInvalidateCacheForAllProfiles ClusterEvent = "inv_all_profiles" + ClusterEventInvalidateCacheForProfileInChannel ClusterEvent = 
"inv_profile_in_channel" + ClusterEventInvalidateCacheForSchemes ClusterEvent = "inv_schemes" + ClusterEventInvalidateCacheForFileInfos ClusterEvent = "inv_file_infos" + ClusterEventInvalidateCacheForWebhooks ClusterEvent = "inv_webhooks" + ClusterEventInvalidateCacheForEmojisById ClusterEvent = "inv_emojis_by_id" + ClusterEventInvalidateCacheForEmojisIdByName ClusterEvent = "inv_emojis_id_by_name" + ClusterEventInvalidateCacheForChannelFileCount ClusterEvent = "inv_channel_file_count" + ClusterEventInvalidateCacheForChannelPinnedpostsCounts ClusterEvent = "inv_channel_pinnedposts_counts" + ClusterEventInvalidateCacheForChannelMemberCounts ClusterEvent = "inv_channel_member_counts" + ClusterEventInvalidateCacheForChannelsMemberCount ClusterEvent = "inv_channels_member_count" + ClusterEventInvalidateCacheForLastPosts ClusterEvent = "inv_last_posts" + ClusterEventInvalidateCacheForLastPostTime ClusterEvent = "inv_last_post_time" + ClusterEventInvalidateCacheForPostsUsage ClusterEvent = "inv_posts_usage" + ClusterEventInvalidateCacheForTeams ClusterEvent = "inv_teams" + ClusterEventInvalidateCacheForContentFlagging ClusterEvent = "inv_content_flagging" + ClusterEventClearSessionCacheForAllUsers ClusterEvent = "inv_all_user_sessions" + ClusterEventInstallPlugin ClusterEvent = "install_plugin" + ClusterEventRemovePlugin ClusterEvent = "remove_plugin" + ClusterEventPluginEvent ClusterEvent = "plugin_event" + ClusterEventInvalidateCacheForTermsOfService ClusterEvent = "inv_terms_of_service" + ClusterEventBusyStateChanged ClusterEvent = "busy_state_change" + // Note: if you are adding a new event, please also add it in the slice of + // m.ClusterEventMap in metrics/metrics.go file. 
+ + // Gossip communication + ClusterGossipEventRequestGetLogs = "gossip_request_get_logs" + ClusterGossipEventResponseGetLogs = "gossip_response_get_logs" + ClusterGossipEventRequestGenerateSupportPacket = "gossip_request_generate_support_packet" + ClusterGossipEventResponseGenerateSupportPacket = "gossip_response_generate_support_packet" + ClusterGossipEventRequestGetClusterStats = "gossip_request_cluster_stats" + ClusterGossipEventResponseGetClusterStats = "gossip_response_cluster_stats" + ClusterGossipEventRequestGetPluginStatuses = "gossip_request_plugin_statuses" + ClusterGossipEventResponseGetPluginStatuses = "gossip_response_plugin_statuses" + ClusterGossipEventRequestSaveConfig = "gossip_request_save_config" + ClusterGossipEventResponseSaveConfig = "gossip_response_save_config" + ClusterGossipEventRequestWebConnCount = "gossip_request_webconn_count" + ClusterGossipEventResponseWebConnCount = "gossip_response_webconn_count" + ClusterGossipEventRequestWSQueues = "gossip_request_ws_queues" + ClusterGossipEventResponseWSQueues = "gossip_response_ws_queues" + + // SendTypes for ClusterMessage. + ClusterSendBestEffort = "best_effort" + ClusterSendReliable = "reliable" +) + +type ClusterMessage struct { + Event ClusterEvent `json:"event"` + SendType string `json:"-"` + WaitForAllToSend bool `json:"-"` + Data []byte `json:"data,omitempty"` + Props map[string]string `json:"props,omitempty"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/cluster_stats.go b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_stats.go new file mode 100644 index 00000000..3b41cb6e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/cluster_stats.go @@ -0,0 +1,11 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type ClusterStats struct { + Id string `json:"id"` + TotalWebsocketConnections int `json:"total_websocket_connections"` + TotalReadDbConnections int `json:"total_read_db_connections"` + TotalMasterDbConnections int `json:"total_master_db_connections"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/command.go b/vendor/github.com/mattermost/mattermost/server/public/model/command.go new file mode 100644 index 00000000..d48fee8f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/command.go @@ -0,0 +1,156 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" + "strings" +) + +const ( + CommandMethodPost = "P" + CommandMethodGet = "G" + MinTriggerLength = 1 + MaxTriggerLength = 128 +) + +type Command struct { + Id string `json:"id"` + Token string `json:"token"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + CreatorId string `json:"creator_id"` + TeamId string `json:"team_id"` + Trigger string `json:"trigger"` + Method string `json:"method"` + Username string `json:"username"` + IconURL string `json:"icon_url"` + AutoComplete bool `json:"auto_complete"` + AutoCompleteDesc string `json:"auto_complete_desc"` + AutoCompleteHint string `json:"auto_complete_hint"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + URL string `json:"url"` + // PluginId records the id of the plugin that created this Command. If it is blank, the Command + // was not created by a plugin. 
+ PluginId string `json:"plugin_id"` + AutocompleteData *AutocompleteData `db:"-" json:"autocomplete_data,omitempty"` + // AutocompleteIconData is a base64 encoded svg + AutocompleteIconData string `db:"-" json:"autocomplete_icon_data,omitempty"` +} + +func (o *Command) Auditable() map[string]any { + return map[string]any{ + "id": o.Id, + "create_at": o.CreateAt, + "update_at": o.UpdateAt, + "delete_at": o.DeleteAt, + "creator_id": o.CreatorId, + "team_id": o.TeamId, + "trigger": o.Trigger, + "username": o.Username, + "icon_url": o.IconURL, + "auto_complete": o.AutoComplete, + "auto_complete_desc": o.AutoCompleteDesc, + "auto_complete_hint": o.AutoCompleteHint, + "display_name": o.DisplayName, + "description": o.Description, + "url": o.URL, + } +} + +func (o *Command) IsValid() *AppError { + if !IsValidId(o.Id) { + return NewAppError("Command.IsValid", "model.command.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Token) != 26 { + return NewAppError("Command.IsValid", "model.command.is_valid.token.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("Command.IsValid", "model.command.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + + if o.UpdateAt == 0 { + return NewAppError("Command.IsValid", "model.command.is_valid.update_at.app_error", nil, "", http.StatusBadRequest) + } + + // If the CreatorId is blank, this should be a command created by a plugin. + if o.CreatorId == "" && !IsValidPluginId(o.PluginId) { + return NewAppError("Command.IsValid", "model.command.is_valid.plugin_id.app_error", nil, "", http.StatusBadRequest) + } + + // If the PluginId is blank, this should be a command associated with a userId. 
+ if o.PluginId == "" && !IsValidId(o.CreatorId) { + return NewAppError("Command.IsValid", "model.command.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreatorId != "" && o.PluginId != "" { + return NewAppError("Command.IsValid", "model.command.is_valid.plugin_id.app_error", nil, "command cannot have both a CreatorId and a PluginId", http.StatusBadRequest) + } + + if !IsValidId(o.TeamId) { + return NewAppError("Command.IsValid", "model.command.is_valid.team_id.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Trigger) < MinTriggerLength || len(o.Trigger) > MaxTriggerLength || strings.Index(o.Trigger, "/") == 0 || strings.Contains(o.Trigger, " ") { + return NewAppError("Command.IsValid", "model.command.is_valid.trigger.app_error", nil, "", http.StatusBadRequest) + } + + if o.URL == "" || len(o.URL) > 1024 { + return NewAppError("Command.IsValid", "model.command.is_valid.url.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidHTTPURL(o.URL) { + return NewAppError("Command.IsValid", "model.command.is_valid.url_http.app_error", nil, "", http.StatusBadRequest) + } + + if !(o.Method == CommandMethodGet || o.Method == CommandMethodPost) { + return NewAppError("Command.IsValid", "model.command.is_valid.method.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.DisplayName) > 64 { + return NewAppError("Command.IsValid", "model.command.is_valid.display_name.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Description) > 128 { + return NewAppError("Command.IsValid", "model.command.is_valid.description.app_error", nil, "", http.StatusBadRequest) + } + + if o.AutocompleteData != nil { + if err := o.AutocompleteData.IsValid(); err != nil { + return NewAppError("Command.IsValid", "model.command.is_valid.autocomplete_data.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + } + + return nil +} + +func (o *Command) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + if o.Token == "" { + o.Token = 
NewId() + } + + o.CreateAt = GetMillis() + o.UpdateAt = o.CreateAt +} + +func (o *Command) PreUpdate() { + o.UpdateAt = GetMillis() +} + +func (o *Command) Sanitize() { + o.Token = "" + o.CreatorId = "" + o.Method = "" + o.URL = "" + o.Username = "" + o.IconURL = "" +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/command_args.go b/vendor/github.com/mattermost/mattermost/server/public/model/command_args.go new file mode 100644 index 00000000..24781c98 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/command_args.go @@ -0,0 +1,55 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "github.com/mattermost/mattermost/server/public/shared/i18n" +) + +type CommandArgs struct { + UserId string `json:"user_id"` + ChannelId string `json:"channel_id"` + TeamId string `json:"team_id"` + RootId string `json:"root_id"` + ParentId string `json:"parent_id"` + TriggerId string `json:"trigger_id,omitempty"` + Command string `json:"command"` + SiteURL string `json:"-"` + T i18n.TranslateFunc `json:"-"` + UserMentions UserMentionMap `json:"-"` + ChannelMentions ChannelMentionMap `json:"-"` +} + +func (o *CommandArgs) Auditable() map[string]any { + return map[string]any{ + "user_id": o.UserId, + "channel_id": o.ChannelId, + "team_id": o.TeamId, + "root_id": o.RootId, + "parent_id": o.ParentId, + "trigger_id": o.TriggerId, + "command": o.Command, + "site_url": o.SiteURL, + } +} + +// AddUserMention adds or overrides an entry in UserMentions with name username +// and identifier userId +func (o *CommandArgs) AddUserMention(username, userId string) { + if o.UserMentions == nil { + o.UserMentions = make(UserMentionMap) + } + + o.UserMentions[username] = userId +} + +// AddChannelMention adds or overrides an entry in ChannelMentions with name +// channelName and identifier channelId +func (o *CommandArgs) AddChannelMention(channelName, 
channelId string) { + if o.ChannelMentions == nil { + o.ChannelMentions = make(ChannelMentionMap) + } + + o.ChannelMentions[channelName] = channelId +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/command_autocomplete.go b/vendor/github.com/mattermost/mattermost/server/public/model/command_autocomplete.go new file mode 100644 index 00000000..9e12d6b0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/command_autocomplete.go @@ -0,0 +1,401 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "net/url" + "path" + "reflect" + "slices" + "strings" + + "github.com/pkg/errors" +) + +// AutocompleteArgType describes autocomplete argument type +type AutocompleteArgType string + +// Argument types +const ( + AutocompleteArgTypeText AutocompleteArgType = "TextInput" + AutocompleteArgTypeStaticList AutocompleteArgType = "StaticList" + AutocompleteArgTypeDynamicList AutocompleteArgType = "DynamicList" +) + +// AutocompleteData describes slash command autocomplete information. +type AutocompleteData struct { + // Trigger of the command + Trigger string + // Hint of a command + Hint string + // Text displayed to the user to help with the autocomplete description + HelpText string + // Role of the user who should be able to see the autocomplete info of this command + RoleID string + // Arguments of the command. Arguments can be named or positional. + // If they are positional order in the list matters, if they are named order does not matter. + // All arguments should be either named or positional, no mixing allowed. + Arguments []*AutocompleteArg + // Subcommands of the command + SubCommands []*AutocompleteData +} + +// AutocompleteArg describes an argument of the command. Arguments can be named or positional. +// If Name is empty string Argument is positional otherwise it is named argument. 
+// Named arguments are passed as --Name Argument_Value. +type AutocompleteArg struct { + // Name of the argument + Name string + // Text displayed to the user to help with the autocomplete + HelpText string + // Type of the argument + Type AutocompleteArgType + // Required determines if argument is optional or not. + Required bool + // Actual data of the argument (depends on the Type) + Data any +} + +// AutocompleteTextArg describes text user can input as an argument. +type AutocompleteTextArg struct { + // Hint of the input text + Hint string + // Regex pattern to match + Pattern string +} + +// AutocompleteListItem describes an item in the AutocompleteStaticListArg. +type AutocompleteListItem struct { + Item string + Hint string + HelpText string +} + +// AutocompleteStaticListArg is used to input one of the arguments from the list, +// for example [yes, no], [on, off], and so on. +type AutocompleteStaticListArg struct { + PossibleArguments []AutocompleteListItem +} + +// AutocompleteDynamicListArg is used when user wants to download possible argument list from the URL. +type AutocompleteDynamicListArg struct { + FetchURL string +} + +// AutocompleteSuggestion describes a single suggestion item sent to the front-end +// Example: for user input `/jira cre` - +// Complete might be `/jira create` +// Suggestion might be `create`, +// Hint might be `[issue text]`, +// Description might be `Create a new Issue` +type AutocompleteSuggestion struct { + // Complete describes completed suggestion + Complete string + // Suggestion describes what user might want to input next + Suggestion string + // Hint describes a hint about the suggested input + Hint string + // Description of the command or a suggestion + Description string + // IconData is base64 encoded svg image + IconData string +} + +// NewAutocompleteData returns new Autocomplete data. 
+func NewAutocompleteData(trigger, hint, helpText string) *AutocompleteData { + return &AutocompleteData{ + Trigger: trigger, + Hint: hint, + HelpText: helpText, + RoleID: SystemUserRoleId, + Arguments: []*AutocompleteArg{}, + SubCommands: []*AutocompleteData{}, + } +} + +// AddCommand adds a subcommand to the autocomplete data. +func (ad *AutocompleteData) AddCommand(command *AutocompleteData) { + ad.SubCommands = append(ad.SubCommands, command) +} + +// AddTextArgument adds positional AutocompleteArgTypeText argument to the command. +func (ad *AutocompleteData) AddTextArgument(helpText, hint, pattern string) { + ad.AddNamedTextArgument("", helpText, hint, pattern, true) +} + +// AddNamedTextArgument adds named AutocompleteArgTypeText argument to the command. +func (ad *AutocompleteData) AddNamedTextArgument(name, helpText, hint, pattern string, required bool) { + argument := AutocompleteArg{ + Name: name, + HelpText: helpText, + Type: AutocompleteArgTypeText, + Required: required, + Data: &AutocompleteTextArg{Hint: hint, Pattern: pattern}, + } + ad.Arguments = append(ad.Arguments, &argument) +} + +// AddStaticListArgument adds positional AutocompleteArgTypeStaticList argument to the command. +func (ad *AutocompleteData) AddStaticListArgument(helpText string, required bool, items []AutocompleteListItem) { + ad.AddNamedStaticListArgument("", helpText, required, items) +} + +// AddNamedStaticListArgument adds named AutocompleteArgTypeStaticList argument to the command. +func (ad *AutocompleteData) AddNamedStaticListArgument(name, helpText string, required bool, items []AutocompleteListItem) { + argument := AutocompleteArg{ + Name: name, + HelpText: helpText, + Type: AutocompleteArgTypeStaticList, + Required: required, + Data: &AutocompleteStaticListArg{PossibleArguments: items}, + } + ad.Arguments = append(ad.Arguments, &argument) +} + +// AddDynamicListArgument adds positional AutocompleteArgTypeDynamicList argument to the command. 
+func (ad *AutocompleteData) AddDynamicListArgument(helpText, url string, required bool) { + ad.AddNamedDynamicListArgument("", helpText, url, required) +} + +// AddNamedDynamicListArgument adds named AutocompleteArgTypeDynamicList argument to the command. +func (ad *AutocompleteData) AddNamedDynamicListArgument(name, helpText, url string, required bool) { + argument := AutocompleteArg{ + Name: name, + HelpText: helpText, + Type: AutocompleteArgTypeDynamicList, + Required: required, + Data: &AutocompleteDynamicListArg{FetchURL: url}, + } + ad.Arguments = append(ad.Arguments, &argument) +} + +// Equals method checks if command is the same. +func (ad *AutocompleteData) Equals(command *AutocompleteData) bool { + if !(ad.Trigger == command.Trigger && ad.HelpText == command.HelpText && ad.RoleID == command.RoleID && ad.Hint == command.Hint) { + return false + } + if len(ad.Arguments) != len(command.Arguments) || len(ad.SubCommands) != len(command.SubCommands) { + return false + } + for i := range ad.Arguments { + if !ad.Arguments[i].Equals(command.Arguments[i]) { + return false + } + } + for i := range ad.SubCommands { + if !ad.SubCommands[i].Equals(command.SubCommands[i]) { + return false + } + } + return true +} + +// UpdateRelativeURLsForPluginCommands method updates relative urls for plugin commands +func (ad *AutocompleteData) UpdateRelativeURLsForPluginCommands(baseURL *url.URL) error { + for _, arg := range ad.Arguments { + if arg.Type != AutocompleteArgTypeDynamicList { + continue + } + dynamicList, ok := arg.Data.(*AutocompleteDynamicListArg) + if !ok { + return errors.New("Not a proper DynamicList type argument") + } + dynamicListURL, err := url.Parse(dynamicList.FetchURL) + if err != nil { + return errors.Wrapf(err, "FetchURL is not a proper url") + } + if !dynamicListURL.IsAbs() { + absURL := &url.URL{} + *absURL = *baseURL + absURL.Path = path.Join(absURL.Path, dynamicList.FetchURL) + dynamicList.FetchURL = absURL.String() + } + } + for _, command := range 
ad.SubCommands { + err := command.UpdateRelativeURLsForPluginCommands(baseURL) + if err != nil { + return err + } + } + return nil +} + +// IsValid method checks if autocomplete data is valid. +func (ad *AutocompleteData) IsValid() error { + if ad == nil { + return errors.New("No nil commands are allowed in AutocompleteData") + } + if ad.Trigger == "" { + return errors.New("An empty command name in the autocomplete data") + } + if strings.ToLower(ad.Trigger) != ad.Trigger { + return errors.New("Command should be lowercase") + } + roles := []string{SystemAdminRoleId, SystemUserRoleId, ""} + if !slices.Contains(roles, ad.RoleID) { + return errors.New("Wrong role in the autocomplete data") + } + if len(ad.Arguments) > 0 && len(ad.SubCommands) > 0 { + return errors.New("Command can't have arguments and subcommands") + } + if len(ad.Arguments) > 0 { + namedArgumentIndex := -1 + for i, arg := range ad.Arguments { + if arg.Name != "" { // it's a named argument + if namedArgumentIndex == -1 { // first named argument + namedArgumentIndex = i + } + } else { // it's a positional argument + if namedArgumentIndex != -1 { + return errors.New("Named argument should not be before positional argument") + } + } + if arg.Type == AutocompleteArgTypeDynamicList { + dynamicList, ok := arg.Data.(*AutocompleteDynamicListArg) + if !ok { + return errors.New("Not a proper DynamicList type argument") + } + _, err := url.Parse(dynamicList.FetchURL) + if err != nil { + return errors.Wrapf(err, "FetchURL is not a proper url") + } + } else if arg.Type == AutocompleteArgTypeStaticList { + staticList, ok := arg.Data.(*AutocompleteStaticListArg) + if !ok { + return errors.New("Not a proper StaticList type argument") + } + for _, arg := range staticList.PossibleArguments { + if arg.Item == "" { + return errors.New("Possible argument name not set in StaticList argument") + } + } + } else if arg.Type == AutocompleteArgTypeText { + if _, ok := arg.Data.(*AutocompleteTextArg); !ok { + return 
errors.New("Not a proper TextInput type argument") + } + if arg.Name == "" && !arg.Required { + return errors.New("Positional argument can not be optional") + } + } + } + } + for _, command := range ad.SubCommands { + err := command.IsValid() + if err != nil { + return err + } + } + return nil +} + +// Equals method checks if argument is the same. +func (a *AutocompleteArg) Equals(arg *AutocompleteArg) bool { + if a.Name != arg.Name || + a.HelpText != arg.HelpText || + a.Type != arg.Type || + a.Required != arg.Required || + !reflect.DeepEqual(a.Data, arg.Data) { + return false + } + return true +} + +// UnmarshalJSON will unmarshal argument +func (a *AutocompleteArg) UnmarshalJSON(b []byte) error { + var arg map[string]any + if err := json.Unmarshal(b, &arg); err != nil { + return errors.Wrapf(err, "Can't unmarshal argument %s", string(b)) + } + var ok bool + a.Name, ok = arg["Name"].(string) + if !ok { + return errors.Errorf("No field Name in the argument %s", string(b)) + } + + a.HelpText, ok = arg["HelpText"].(string) + if !ok { + return errors.Errorf("No field HelpText in the argument %s", string(b)) + } + + t, ok := arg["Type"].(string) + if !ok { + return errors.Errorf("No field Type in the argument %s", string(b)) + } + a.Type = AutocompleteArgType(t) + + a.Required, ok = arg["Required"].(bool) + if !ok { + return errors.Errorf("No field Required in the argument %s", string(b)) + } + + data, ok := arg["Data"] + if !ok { + return errors.Errorf("No field Data in the argument %s", string(b)) + } + + if a.Type == AutocompleteArgTypeText { + m, ok := data.(map[string]any) + if !ok { + return errors.Errorf("Wrong Data type in the TextInput argument %s", string(b)) + } + pattern, ok := m["Pattern"].(string) + if !ok { + return errors.Errorf("No field Pattern in the TextInput argument %s", string(b)) + } + hint, ok := m["Hint"].(string) + if !ok { + return errors.Errorf("No field Hint in the TextInput argument %s", string(b)) + } + a.Data = 
&AutocompleteTextArg{Hint: hint, Pattern: pattern} + } else if a.Type == AutocompleteArgTypeStaticList { + m, ok := data.(map[string]any) + if !ok { + return errors.Errorf("Wrong Data type in the StaticList argument %s", string(b)) + } + list, ok := m["PossibleArguments"].([]any) + if !ok { + return errors.Errorf("No field PossibleArguments in the StaticList argument %s", string(b)) + } + + possibleArguments := []AutocompleteListItem{} + for i := range list { + args, ok := list[i].(map[string]any) + if !ok { + return errors.Errorf("Wrong AutocompleteStaticListItem type in the StaticList argument %s", string(b)) + } + item, ok := args["Item"].(string) + if !ok { + return errors.Errorf("No field Item in the StaticList's possible arguments %s", string(b)) + } + + hint, ok := args["Hint"].(string) + if !ok { + return errors.Errorf("No field Hint in the StaticList's possible arguments %s", string(b)) + } + helpText, ok := args["HelpText"].(string) + if !ok { + return errors.Errorf("No field Hint in the StaticList's possible arguments %s", string(b)) + } + + possibleArguments = append(possibleArguments, AutocompleteListItem{ + Item: item, + Hint: hint, + HelpText: helpText, + }) + } + a.Data = &AutocompleteStaticListArg{PossibleArguments: possibleArguments} + } else if a.Type == AutocompleteArgTypeDynamicList { + m, ok := data.(map[string]any) + if !ok { + return errors.Errorf("Wrong type in the DynamicList argument %s", string(b)) + } + url, ok := m["FetchURL"].(string) + if !ok { + return errors.Errorf("No field FetchURL in the DynamicList's argument %s", string(b)) + } + a.Data = &AutocompleteDynamicListArg{FetchURL: url} + } + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/command_request.go b/vendor/github.com/mattermost/mattermost/server/public/model/command_request.go new file mode 100644 index 00000000..331394cb --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/command_request.go @@ -0,0 +1,8 
@@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type CommandMoveRequest struct { + TeamId string `json:"team_id"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/command_response.go b/vendor/github.com/mattermost/mattermost/server/public/model/command_response.go new file mode 100644 index 00000000..084febc9 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/command_response.go @@ -0,0 +1,71 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "io" + "strings" + + "github.com/mattermost/mattermost/server/public/utils" +) + +const ( + CommandResponseTypeInChannel = "in_channel" + CommandResponseTypeEphemeral = "ephemeral" +) + +type CommandResponse struct { + ResponseType string `json:"response_type"` + Text string `json:"text"` + Username string `json:"username"` + ChannelId string `json:"channel_id"` + IconURL string `json:"icon_url"` + Type string `json:"type"` + Props StringInterface `json:"props"` + GotoLocation string `json:"goto_location"` + TriggerId string `json:"trigger_id"` + SkipSlackParsing bool `json:"skip_slack_parsing"` // Set to `true` to skip the Slack-compatibility handling of Text. 
+ Attachments []*SlackAttachment `json:"attachments"` + ExtraResponses []*CommandResponse `json:"extra_responses"` +} + +func CommandResponseFromHTTPBody(contentType string, body io.Reader) (*CommandResponse, error) { + if strings.TrimSpace(strings.Split(contentType, ";")[0]) == "application/json" { + return CommandResponseFromJSON(body) + } + if b, err := io.ReadAll(body); err == nil { + return CommandResponseFromPlainText(string(b)), nil + } + return nil, nil +} + +func CommandResponseFromPlainText(text string) *CommandResponse { + return &CommandResponse{ + Text: text, + } +} + +func CommandResponseFromJSON(data io.Reader) (*CommandResponse, error) { + b, err := io.ReadAll(data) + if err != nil { + return nil, err + } + + var o CommandResponse + err = json.Unmarshal(b, &o) + if err != nil { + return nil, utils.HumanizeJSONError(err, b) + } + + o.Attachments = StringifySlackFieldValue(o.Attachments) + + if o.ExtraResponses != nil { + for _, resp := range o.ExtraResponses { + resp.Attachments = StringifySlackFieldValue(resp.Attachments) + } + } + + return &o, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/command_webhook.go b/vendor/github.com/mattermost/mattermost/server/public/model/command_webhook.go new file mode 100644 index 00000000..8093c1b7 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/command_webhook.go @@ -0,0 +1,60 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" +) + +type CommandWebhook struct { + Id string + CreateAt int64 + CommandId string + UserId string + ChannelId string + RootId string + UseCount int +} + +const ( + CommandWebhookLifetime = 1000 * 60 * 30 +) + +func (o *CommandWebhook) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + if o.CreateAt == 0 { + o.CreateAt = GetMillis() + } +} + +func (o *CommandWebhook) IsValid() *AppError { + if !IsValidId(o.Id) { + return NewAppError("CommandWebhook.IsValid", "model.command_hook.id.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("CommandWebhook.IsValid", "model.command_hook.create_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !IsValidId(o.CommandId) { + return NewAppError("CommandWebhook.IsValid", "model.command_hook.command_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.UserId) { + return NewAppError("CommandWebhook.IsValid", "model.command_hook.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.ChannelId) { + return NewAppError("CommandWebhook.IsValid", "model.command_hook.channel_id.app_error", nil, "", http.StatusBadRequest) + } + + if o.RootId != "" && !IsValidId(o.RootId) { + return NewAppError("CommandWebhook.IsValid", "model.command_hook.root_id.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/compliance.go b/vendor/github.com/mattermost/mattermost/server/public/model/compliance.go new file mode 100644 index 00000000..d91267ae --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/compliance.go @@ -0,0 +1,141 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" + "strings" + + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +const ( + ComplianceStatusCreated = "created" + ComplianceStatusRunning = "running" + ComplianceStatusFinished = "finished" + ComplianceStatusFailed = "failed" + ComplianceStatusRemoved = "removed" + + ComplianceTypeDaily = "daily" + ComplianceTypeAdhoc = "adhoc" +) + +type Compliance struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UserId string `json:"user_id"` + Status string `json:"status"` + Count int `json:"count"` + Desc string `json:"desc"` + Type string `json:"type"` + StartAt int64 `json:"start_at"` + EndAt int64 `json:"end_at"` + Keywords string `json:"keywords"` + Emails string `json:"emails"` +} + +func (c *Compliance) Auditable() map[string]any { + return map[string]any{ + "id": c.Id, + "create_at": c.CreateAt, + "user_id": c.UserId, + "status": c.Status, + "count": c.Count, + "desc": c.Desc, + "type": c.Type, + "start_at": c.StartAt, + "end_at": c.EndAt, + "keywords": c.Keywords, + "emails": c.Emails, + } +} + +type Compliances []Compliance + +// ComplianceExportCursor is used for paginated iteration of posts +// for compliance export. +// We need to keep track of the last post ID in addition to the last post +// CreateAt to break ties when two posts have the same CreateAt. 
+type ComplianceExportCursor struct { + LastChannelsQueryPostCreateAt int64 + LastChannelsQueryPostID string + ChannelsQueryCompleted bool + LastDirectMessagesQueryPostCreateAt int64 + LastDirectMessagesQueryPostID string + DirectMessagesQueryCompleted bool +} + +func (c *Compliance) PreSave() { + if c.Id == "" { + c.Id = NewId() + } + + if c.Status == "" { + c.Status = ComplianceStatusCreated + } + + c.Count = 0 + c.Emails = NormalizeEmail(c.Emails) + c.Keywords = strings.ToLower(c.Keywords) + + c.CreateAt = GetMillis() +} + +func (c *Compliance) DeepCopy() *Compliance { + cCopy := *c + return &cCopy +} + +func (c *Compliance) JobName() string { + jobName := c.Type + if c.Type == ComplianceTypeDaily { + jobName += "-" + c.Desc + } + + jobName += "-" + c.Id + + return jobName +} + +func (c *Compliance) IsValid() *AppError { + if !IsValidId(c.Id) { + return NewAppError("Compliance.IsValid", "model.compliance.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if c.CreateAt == 0 { + return NewAppError("Compliance.IsValid", "model.compliance.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + + if len(c.Desc) > 512 || c.Desc == "" { + return NewAppError("Compliance.IsValid", "model.compliance.is_valid.desc.app_error", nil, "", http.StatusBadRequest) + } + + if c.StartAt == 0 { + return NewAppError("Compliance.IsValid", "model.compliance.is_valid.start_at.app_error", nil, "", http.StatusBadRequest) + } + + if c.EndAt == 0 { + return NewAppError("Compliance.IsValid", "model.compliance.is_valid.end_at.app_error", nil, "", http.StatusBadRequest) + } + + if c.EndAt <= c.StartAt { + return NewAppError("Compliance.IsValid", "model.compliance.is_valid.start_end_at.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +// LoggerFields returns the logger annotations reflecting the given compliance job metadata. 
+func (c *Compliance) LoggerFields() []mlog.Field { + if c == nil { + return nil + } + + return []mlog.Field{ + mlog.String("job_id", c.Id), + mlog.String("job_type", c.Type), + mlog.String("job_name", c.JobName()), + mlog.Millis("job_create_at", c.CreateAt), + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/compliance_post.go b/vendor/github.com/mattermost/mattermost/server/public/model/compliance_post.go new file mode 100644 index 00000000..f86ab2fe --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/compliance_post.go @@ -0,0 +1,120 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "regexp" + "time" +) + +type CompliancePost struct { + + // From Team + TeamName string + TeamDisplayName string + + // From Channel + ChannelName string + ChannelDisplayName string + ChannelType string + + // From User + UserUsername string + UserEmail string + UserNickname string + + // From Post + PostId string + PostCreateAt int64 + PostUpdateAt int64 + PostDeleteAt int64 + PostRootId string + PostOriginalId string + PostMessage string + PostType string + PostProps string + PostHashtags string + PostFileIds string + + IsBot bool +} + +func CompliancePostHeader() []string { + return []string{ + "TeamName", + "TeamDisplayName", + + "ChannelName", + "ChannelDisplayName", + "ChannelType", + + "UserUsername", + "UserEmail", + "UserNickname", + "UserType", + + "PostId", + "PostCreateAt", + "PostUpdateAt", + "PostDeleteAt", + "PostRootId", + "PostOriginalId", + "PostMessage", + "PostType", + "PostProps", + "PostHashtags", + "PostFileIds", + } +} + +func cleanComplianceStrings(in string) string { + if matched, _ := regexp.MatchString("^\\s*(=|\\+|\\-)", in); matched { + return "'" + in + } + return in +} + +func (cp *CompliancePost) Row() []string { + postDeleteAt := "" + if cp.PostDeleteAt > 0 { + postDeleteAt = time.Unix(0, 
cp.PostDeleteAt*int64(1000*1000)).Format(time.RFC3339) + } + + postUpdateAt := "" + if cp.PostUpdateAt != cp.PostCreateAt { + postUpdateAt = time.Unix(0, cp.PostUpdateAt*int64(1000*1000)).Format(time.RFC3339) + } + + userType := "user" + if cp.IsBot { + userType = "bot" + } + + return []string{ + cleanComplianceStrings(cp.TeamName), + cleanComplianceStrings(cp.TeamDisplayName), + + cleanComplianceStrings(cp.ChannelName), + cleanComplianceStrings(cp.ChannelDisplayName), + cleanComplianceStrings(cp.ChannelType), + + cleanComplianceStrings(cp.UserUsername), + cleanComplianceStrings(cp.UserEmail), + cleanComplianceStrings(cp.UserNickname), + userType, + + cp.PostId, + time.Unix(0, cp.PostCreateAt*int64(1000*1000)).Format(time.RFC3339), + postUpdateAt, + postDeleteAt, + + cp.PostRootId, + cp.PostOriginalId, + cleanComplianceStrings(cp.PostMessage), + cp.PostType, + cp.PostProps, + cp.PostHashtags, + cp.PostFileIds, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/config.go b/vendor/github.com/mattermost/mattermost/server/public/model/config.go new file mode 100644 index 00000000..842a6a54 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/config.go @@ -0,0 +1,5266 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "crypto/tls" + "encoding/json" + "io" + "math" + "net" + "net/http" + "net/url" + "os" + "path/filepath" + "reflect" + "regexp" + "slices" + "strconv" + "strings" + "time" + + "github.com/mattermost/ldap" + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/mattermost/mattermost/server/public/utils" +) + +const ( + ConnSecurityNone = "" + ConnSecurityPlain = "PLAIN" + ConnSecurityTLS = "TLS" + ConnSecurityStarttls = "STARTTLS" + + ImageDriverLocal = "local" + ImageDriverS3 = "amazons3" + + DatabaseDriverPostgres = "postgres" + + SearchengineElasticsearch = "elasticsearch" + + MinioAccessKey = "minioaccesskey" + MinioSecretKey = "miniosecretkey" + MinioBucket = "mattermost-test" + + PasswordMaximumLength = 72 + PasswordMinimumLength = 5 + + ServiceGitlab = "gitlab" + + ServiceGoogle = "google" + ServiceOffice365 = "office365" + ServiceOpenid = "openid" + + GenericNoChannelNotification = "generic_no_channel" + GenericNotification = "generic" + GenericNotificationServer = "https://push-test.mattermost.com" + MmSupportAdvisorAddress = "support-advisor@mattermost.com" + FullNotification = "full" + IdLoadedNotification = "id_loaded" + + DirectMessageAny = "any" + DirectMessageTeam = "team" + + ShowUsername = "username" + ShowNicknameFullName = "nickname_full_name" + ShowFullName = "full_name" + + PermissionsAll = "all" + PermissionsChannelAdmin = "channel_admin" + PermissionsTeamAdmin = "team_admin" + PermissionsSystemAdmin = "system_admin" + + FakeSetting = "********************************" + + // SanitizedPassword is the placeholder used for redacting passwords in data sources + SanitizedPassword = "****" + + RestrictEmojiCreationAll = "all" + RestrictEmojiCreationAdmin = "admin" + RestrictEmojiCreationSystemAdmin = "system_admin" + + PermissionsDeletePostAll = "all" + PermissionsDeletePostTeamAdmin = "team_admin" + PermissionsDeletePostSystemAdmin = "system_admin" + + 
GroupUnreadChannelsDisabled = "disabled" + GroupUnreadChannelsDefaultOn = "default_on" + GroupUnreadChannelsDefaultOff = "default_off" + + CollapsedThreadsDisabled = "disabled" + CollapsedThreadsDefaultOn = "default_on" + CollapsedThreadsDefaultOff = "default_off" + CollapsedThreadsAlwaysOn = "always_on" + + EmailBatchingBufferSize = 256 + EmailBatchingInterval = 30 + + EmailNotificationContentsFull = "full" + EmailNotificationContentsGeneric = "generic" + + EmailSMTPDefaultServer = "localhost" + EmailSMTPDefaultPort = "10025" + + CacheTypeLRU = "lru" + CacheTypeRedis = "redis" + + SitenameMaxLength = 30 + + ServiceSettingsDefaultSiteURL = "http://localhost:8065" + ServiceSettingsDefaultTLSCertFile = "" + ServiceSettingsDefaultTLSKeyFile = "" + ServiceSettingsDefaultReadTimeout = 300 + ServiceSettingsDefaultWriteTimeout = 300 + ServiceSettingsDefaultIdleTimeout = 60 + ServiceSettingsDefaultMaxLoginAttempts = 10 + ServiceSettingsDefaultAllowCorsFrom = "" + ServiceSettingsDefaultListenAndAddress = ":8065" + ServiceSettingsDefaultGiphySdkKeyTest = "s0glxvzVg9azvPipKxcPLpXV0q1x1fVP" + ServiceSettingsDefaultDeveloperFlags = "" + ServiceSettingsDefaultUniqueReactionsPerPost = 50 + ServiceSettingsDefaultMaxURLLength = 2048 + ServiceSettingsMaxUniqueReactionsPerPost = 500 + + TeamSettingsDefaultSiteName = "Mattermost" + TeamSettingsDefaultMaxUsersPerTeam = 50 + TeamSettingsDefaultCustomBrandText = "" + TeamSettingsDefaultCustomDescriptionText = "" + TeamSettingsDefaultUserStatusAwayTimeout = 300 + + SqlSettingsDefaultDataSource = "postgres://mmuser:mostest@localhost/mattermost_test?sslmode=disable&connect_timeout=10&binary_parameters=yes" + + FileSettingsDefaultDirectory = "./data/" + FileSettingsDefaultS3UploadPartSizeBytes = 5 * 1024 * 1024 // 5MB + FileSettingsDefaultS3ExportUploadPartSizeBytes = 100 * 1024 * 1024 // 100MB + + ImportSettingsDefaultDirectory = "./import" + ImportSettingsDefaultRetentionDays = 30 + + ExportSettingsDefaultDirectory = "./export" + 
ExportSettingsDefaultRetentionDays = 30 + + EmailSettingsDefaultFeedbackOrganization = "" + + SupportSettingsDefaultTermsOfServiceLink = "https://mattermost.com/pl/terms-of-use/" + SupportSettingsDefaultPrivacyPolicyLink = "https://mattermost.com/pl/privacy-policy/" + SupportSettingsDefaultAboutLink = "https://mattermost.com/pl/about-mattermost" + SupportSettingsDefaultHelpLink = "https://mattermost.com/pl/help/" + SupportSettingsDefaultReportAProblemLink = "https://mattermost.com/pl/report-a-bug" + SupportSettingsDefaultSupportEmail = "" + SupportSettingsDefaultReAcceptancePeriod = 365 + + SupportSettingsReportAProblemTypeLink = "link" + SupportSettingsReportAProblemTypeMail = "email" + SupportSettingsReportAProblemTypeHidden = "hidden" + SupportSettingsReportAProblemTypeDefault = "default" + SupportSettingsDefaultReportAProblemType = SupportSettingsReportAProblemTypeDefault + + LdapSettingsDefaultFirstNameAttribute = "" + LdapSettingsDefaultLastNameAttribute = "" + LdapSettingsDefaultEmailAttribute = "" + LdapSettingsDefaultUsernameAttribute = "" + LdapSettingsDefaultNicknameAttribute = "" + LdapSettingsDefaultIdAttribute = "" + LdapSettingsDefaultPositionAttribute = "" + LdapSettingsDefaultLoginFieldName = "" + LdapSettingsDefaultGroupDisplayNameAttribute = "" + LdapSettingsDefaultGroupIdAttribute = "" + LdapSettingsDefaultPictureAttribute = "" + LdapSettingsDefaultMaximumLoginAttempts = 10 + + SamlSettingsDefaultIdAttribute = "" + SamlSettingsDefaultGuestAttribute = "" + SamlSettingsDefaultAdminAttribute = "" + SamlSettingsDefaultFirstNameAttribute = "" + SamlSettingsDefaultLastNameAttribute = "" + SamlSettingsDefaultEmailAttribute = "" + SamlSettingsDefaultUsernameAttribute = "" + SamlSettingsDefaultNicknameAttribute = "" + SamlSettingsDefaultLocaleAttribute = "" + SamlSettingsDefaultPositionAttribute = "" + + SamlSettingsSignatureAlgorithmSha1 = "RSAwithSHA1" + SamlSettingsSignatureAlgorithmSha256 = "RSAwithSHA256" + SamlSettingsSignatureAlgorithmSha512 = 
"RSAwithSHA512" + SamlSettingsDefaultSignatureAlgorithm = SamlSettingsSignatureAlgorithmSha256 + + SamlSettingsCanonicalAlgorithmC14n = "Canonical1.0" + SamlSettingsCanonicalAlgorithmC14n11 = "Canonical1.1" + SamlSettingsDefaultCanonicalAlgorithm = SamlSettingsCanonicalAlgorithmC14n + + NativeappSettingsDefaultAppDownloadLink = "https://mattermost.com/pl/download-apps" + NativeappSettingsDefaultAndroidAppDownloadLink = "https://mattermost.com/pl/android-app/" + NativeappSettingsDefaultIosAppDownloadLink = "https://mattermost.com/pl/ios-app/" + + ExperimentalSettingsDefaultLinkMetadataTimeoutMilliseconds = 5000 + ExperimentalSettingsDefaultUsersStatusAndProfileFetchingPollIntervalMilliseconds = 3000 + + AnalyticsSettingsDefaultMaxUsersForStatistics = 2500 + + AnnouncementSettingsDefaultBannerColor = "#f2a93b" + AnnouncementSettingsDefaultBannerTextColor = "#333333" + AnnouncementSettingsDefaultNoticesJsonURL = "https://notices.mattermost.com/" + AnnouncementSettingsDefaultNoticesFetchFrequencySeconds = 3600 + + TeamSettingsDefaultTeamText = "default" + + ElasticsearchSettingsDefaultConnectionURL = "http://localhost:9200" + ElasticsearchSettingsDefaultUsername = "elastic" + ElasticsearchSettingsDefaultPassword = "changeme" + ElasticsearchSettingsDefaultPostIndexReplicas = 1 + ElasticsearchSettingsDefaultPostIndexShards = 1 + ElasticsearchSettingsDefaultChannelIndexReplicas = 1 + ElasticsearchSettingsDefaultChannelIndexShards = 1 + ElasticsearchSettingsDefaultUserIndexReplicas = 1 + ElasticsearchSettingsDefaultUserIndexShards = 1 + ElasticsearchSettingsDefaultAggregatePostsAfterDays = 365 + ElasticsearchSettingsDefaultPostsAggregatorJobStartTime = "03:00" + ElasticsearchSettingsDefaultIndexPrefix = "" + ElasticsearchSettingsDefaultLiveIndexingBatchSize = 10 + ElasticsearchSettingsDefaultRequestTimeoutSeconds = 30 + ElasticsearchSettingsDefaultBatchSize = 10000 + ElasticsearchSettingsESBackend = "elasticsearch" + ElasticsearchSettingsOSBackend = "opensearch" + + 
DataRetentionSettingsDefaultMessageRetentionDays = 365 + DataRetentionSettingsDefaultMessageRetentionHours = 0 + DataRetentionSettingsDefaultFileRetentionDays = 365 + DataRetentionSettingsDefaultFileRetentionHours = 0 + DataRetentionSettingsDefaultBoardsRetentionDays = 365 + DataRetentionSettingsDefaultDeletionJobStartTime = "02:00" + DataRetentionSettingsDefaultBatchSize = 3000 + DataRetentionSettingsDefaultTimeBetweenBatchesMilliseconds = 100 + DataRetentionSettingsDefaultRetentionIdsBatchSize = 100 + + OutgoingIntegrationRequestsDefaultTimeout = 30 + + PluginSettingsDefaultDirectory = "./plugins" + PluginSettingsDefaultClientDirectory = "./client/plugins" + PluginSettingsDefaultEnableMarketplace = true + PluginSettingsDefaultMarketplaceURL = "https://api.integrations.mattermost.com" + PluginSettingsOldMarketplaceURL = "https://marketplace.integrations.mattermost.com" + + ComplianceExportDirectoryFormat = "compliance-export-2006-01-02-15h04m" + ComplianceExportPath = "export" + ComplianceExportPathCLI = "cli" + ComplianceExportTypeCsv = "csv" + ComplianceExportTypeActiance = "actiance" + ComplianceExportTypeGlobalrelay = "globalrelay" + ComplianceExportTypeGlobalrelayZip = "globalrelay-zip" + ComplianceExportChannelBatchSizeDefault = 100 + ComplianceExportChannelHistoryBatchSizeDefault = 10 + + GlobalrelayCustomerTypeA9 = "A9" + GlobalrelayCustomerTypeA10 = "A10" + GlobalrelayCustomerTypeCustom = "CUSTOM" + + ImageProxyTypeLocal = "local" + ImageProxyTypeAtmosCamo = "atmos/camo" + + GoogleSettingsDefaultScope = "profile email" + GoogleSettingsDefaultAuthEndpoint = "https://accounts.google.com/o/oauth2/v2/auth" + GoogleSettingsDefaultTokenEndpoint = "https://www.googleapis.com/oauth2/v4/token" + GoogleSettingsDefaultUserAPIEndpoint = "https://people.googleapis.com/v1/people/me?personFields=names,emailAddresses,nicknames,metadata" + + Office365SettingsDefaultScope = "User.Read" + Office365SettingsDefaultAuthEndpoint = 
"https://login.microsoftonline.com/common/oauth2/v2.0/authorize" + Office365SettingsDefaultTokenEndpoint = "https://login.microsoftonline.com/common/oauth2/v2.0/token" + Office365SettingsDefaultUserAPIEndpoint = "https://graph.microsoft.com/v1.0/me" + + CloudSettingsDefaultCwsURL = "https://customers.mattermost.com" + CloudSettingsDefaultCwsAPIURL = "https://portal.internal.prod.cloud.mattermost.com" + CloudSettingsDefaultCwsURLTest = "https://portal.test.cloud.mattermost.com" + CloudSettingsDefaultCwsAPIURLTest = "https://api.internal.test.cloud.mattermost.com" + + OpenidSettingsDefaultScope = "profile openid email" + + LocalModeSocketPath = "/var/tmp/mattermost_local.socket" + + ConnectedWorkspacesSettingsDefaultMaxPostsPerSync = 50 // a bit more than 4 typical screenfulls of posts + ConnectedWorkspacesSettingsDefaultMemberSyncBatchSize = 20 // optimal batch size for syncing channel members + + // These storage classes are the valid values for the x-amz-storage-class header. More documentation here https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#AmazonS3-PutObject-request-header-StorageClass + StorageClassStandard = "STANDARD" + StorageClassReducedRedundancy = "REDUCED_REDUNDANCY" + StorageClassStandardIA = "STANDARD_IA" + StorageClassOnezoneIA = "ONEZONE_IA" + StorageClassIntelligentTiering = "INTELLIGENT_TIERING" + StorageClassGlacier = "GLACIER" + StorageClassDeepArchive = "DEEP_ARCHIVE" + StorageClassOutposts = "OUTPOSTS" + StorageClassGlacierIR = "GLACIER_IR" + StorageClassSnow = "SNOW" + StorageClassExpressOnezone = "EXPRESS_ONEZONE" +) + +func GetDefaultAppCustomURLSchemes() []string { + return []string{"mmauth://", "mmauthbeta://"} +} + +var ServerTLSSupportedCiphers = map[string]uint16{ + "TLS_RSA_WITH_RC4_128_SHA": tls.TLS_RSA_WITH_RC4_128_SHA, + "TLS_RSA_WITH_3DES_EDE_CBC_SHA": tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA, + "TLS_RSA_WITH_AES_128_CBC_SHA": tls.TLS_RSA_WITH_AES_128_CBC_SHA, + "TLS_RSA_WITH_AES_256_CBC_SHA": 
tls.TLS_RSA_WITH_AES_256_CBC_SHA, + "TLS_RSA_WITH_AES_128_CBC_SHA256": tls.TLS_RSA_WITH_AES_128_CBC_SHA256, + "TLS_RSA_WITH_AES_128_GCM_SHA256": tls.TLS_RSA_WITH_AES_128_GCM_SHA256, + "TLS_RSA_WITH_AES_256_GCM_SHA384": tls.TLS_RSA_WITH_AES_256_GCM_SHA384, + "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA": tls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA, + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA": tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA": tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, + "TLS_ECDHE_RSA_WITH_RC4_128_SHA": tls.TLS_ECDHE_RSA_WITH_RC4_128_SHA, + "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA": tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA, + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA": tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA": tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305": tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305, + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305": tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305, +} + +type ServiceSettings struct { + SiteURL *string `access:"environment_web_server,authentication_saml,write_restrictable"` + WebsocketURL *string `access:"write_restrictable,cloud_restrictable"` + LicenseFileLocation *string `access:"write_restrictable,cloud_restrictable"` // telemetry: none + ListenAddress *string `access:"environment_web_server,write_restrictable,cloud_restrictable"` // telemetry: none + ConnectionSecurity *string 
`access:"environment_web_server,write_restrictable,cloud_restrictable"` + TLSCertFile *string `access:"environment_web_server,write_restrictable,cloud_restrictable"` + TLSKeyFile *string `access:"environment_web_server,write_restrictable,cloud_restrictable"` + TLSMinVer *string `access:"write_restrictable,cloud_restrictable"` // telemetry: none + TLSStrictTransport *bool `access:"write_restrictable,cloud_restrictable"` + // In seconds. + TLSStrictTransportMaxAge *int64 `access:"write_restrictable,cloud_restrictable"` // telemetry: none + TLSOverwriteCiphers []string `access:"write_restrictable,cloud_restrictable"` // telemetry: none + UseLetsEncrypt *bool `access:"environment_web_server,write_restrictable,cloud_restrictable"` + LetsEncryptCertificateCacheFile *string `access:"environment_web_server,write_restrictable,cloud_restrictable"` // telemetry: none + Forward80To443 *bool `access:"environment_web_server,write_restrictable,cloud_restrictable"` + TrustedProxyIPHeader []string `access:"write_restrictable,cloud_restrictable"` // telemetry: none + ReadTimeout *int `access:"environment_web_server,write_restrictable,cloud_restrictable"` + WriteTimeout *int `access:"environment_web_server,write_restrictable,cloud_restrictable"` + IdleTimeout *int `access:"write_restrictable,cloud_restrictable"` + MaximumLoginAttempts *int `access:"authentication_password,write_restrictable,cloud_restrictable"` + GoroutineHealthThreshold *int `access:"write_restrictable,cloud_restrictable"` // telemetry: none + EnableOAuthServiceProvider *bool `access:"integrations_integration_management"` + EnableDynamicClientRegistration *bool `access:"integrations_integration_management"` + EnableIncomingWebhooks *bool `access:"integrations_integration_management"` + EnableOutgoingWebhooks *bool `access:"integrations_integration_management"` + EnableOutgoingOAuthConnections *bool `access:"integrations_integration_management"` + EnableCommands *bool `access:"integrations_integration_management"` + 
OutgoingIntegrationRequestsTimeout *int64 `access:"integrations_integration_management"` // In seconds. + EnablePostUsernameOverride *bool `access:"integrations_integration_management"` + EnablePostIconOverride *bool `access:"integrations_integration_management"` + GoogleDeveloperKey *string `access:"site_posts,write_restrictable,cloud_restrictable"` + EnableLinkPreviews *bool `access:"site_posts"` + EnablePermalinkPreviews *bool `access:"site_posts"` + RestrictLinkPreviews *string `access:"site_posts"` + EnableTesting *bool `access:"environment_developer,write_restrictable,cloud_restrictable"` + EnableDeveloper *bool `access:"environment_developer,write_restrictable,cloud_restrictable"` + DeveloperFlags *string `access:"environment_developer,cloud_restrictable"` + EnableClientPerformanceDebugging *bool `access:"environment_developer,write_restrictable,cloud_restrictable"` + EnableSecurityFixAlert *bool `access:"environment_smtp,write_restrictable,cloud_restrictable"` + EnableInsecureOutgoingConnections *bool `access:"environment_web_server,write_restrictable,cloud_restrictable"` + AllowedUntrustedInternalConnections *string `access:"environment_web_server,write_restrictable,cloud_restrictable"` + EnableMultifactorAuthentication *bool `access:"authentication_mfa"` + EnforceMultifactorAuthentication *bool `access:"authentication_mfa"` + EnableUserAccessTokens *bool `access:"integrations_integration_management"` + AllowCorsFrom *string `access:"integrations_cors,write_restrictable,cloud_restrictable"` + CorsExposedHeaders *string `access:"integrations_cors,write_restrictable,cloud_restrictable"` + CorsAllowCredentials *bool `access:"integrations_cors,write_restrictable,cloud_restrictable"` + CorsDebug *bool `access:"integrations_cors,write_restrictable,cloud_restrictable"` + AllowCookiesForSubdomains *bool `access:"write_restrictable,cloud_restrictable"` + ExtendSessionLengthWithActivity *bool 
`access:"environment_session_lengths,write_restrictable,cloud_restrictable"` + TerminateSessionsOnPasswordChange *bool `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` + + // Deprecated + SessionLengthWebInDays *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` // telemetry: none + SessionLengthWebInHours *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` + // Deprecated + SessionLengthMobileInDays *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` // telemetry: none + SessionLengthMobileInHours *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` + // Deprecated + SessionLengthSSOInDays *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` // telemetry: none + SessionLengthSSOInHours *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` + + SessionCacheInMinutes *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` + SessionIdleTimeoutInMinutes *int `access:"environment_session_lengths,write_restrictable,cloud_restrictable"` + WebsocketSecurePort *int `access:"write_restrictable,cloud_restrictable"` // telemetry: none + WebsocketPort *int `access:"write_restrictable,cloud_restrictable"` // telemetry: none + WebserverMode *string `access:"environment_web_server,write_restrictable,cloud_restrictable"` + EnableGifPicker *bool `access:"integrations_gif"` + GiphySdkKey *string `access:"integrations_gif"` + EnableCustomEmoji *bool `access:"site_emoji"` + EnableEmojiPicker *bool `access:"site_emoji"` + PostEditTimeLimit *int `access:"user_management_permissions"` + TimeBetweenUserTypingUpdatesMilliseconds *int64 `access:"experimental_features,write_restrictable,cloud_restrictable"` + EnableCrossTeamSearch *bool `access:"write_restrictable,cloud_restrictable"` + EnablePostSearch *bool `access:"write_restrictable,cloud_restrictable"` + 
EnableFileSearch *bool `access:"write_restrictable"` + MinimumHashtagLength *int `access:"environment_database,write_restrictable,cloud_restrictable"` + EnableUserTypingMessages *bool `access:"experimental_features,write_restrictable,cloud_restrictable"` + EnableChannelViewedMessages *bool `access:"experimental_features,write_restrictable,cloud_restrictable"` + EnableUserStatuses *bool `access:"write_restrictable,cloud_restrictable"` + ExperimentalEnableAuthenticationTransfer *bool `access:"experimental_features"` + ClusterLogTimeoutMilliseconds *int `access:"write_restrictable,cloud_restrictable"` + EnableTutorial *bool `access:"experimental_features"` + EnableOnboardingFlow *bool `access:"experimental_features"` + ExperimentalEnableDefaultChannelLeaveJoinMessages *bool `access:"experimental_features"` + ExperimentalGroupUnreadChannels *string `access:"experimental_features"` + EnableAPITeamDeletion *bool + EnableAPITriggerAdminNotifications *bool + EnableAPIUserDeletion *bool + EnableAPIPostDeletion *bool + EnableDesktopLandingPage *bool + ExperimentalEnableHardenedMode *bool `access:"experimental_features"` + ExperimentalStrictCSRFEnforcement *bool `access:"experimental_features,write_restrictable,cloud_restrictable"` + EnableEmailInvitations *bool `access:"authentication_signup"` + DisableBotsWhenOwnerIsDeactivated *bool `access:"integrations_bot_accounts"` + EnableBotAccountCreation *bool `access:"integrations_bot_accounts"` + EnableSVGs *bool `access:"site_posts"` + EnableLatex *bool `access:"site_posts"` + EnableInlineLatex *bool `access:"site_posts"` + PostPriority *bool `access:"site_posts"` + AllowPersistentNotifications *bool `access:"site_posts"` + AllowPersistentNotificationsForGuests *bool `access:"site_posts"` + PersistentNotificationIntervalMinutes *int `access:"site_posts"` + PersistentNotificationMaxCount *int `access:"site_posts"` + PersistentNotificationMaxRecipients *int `access:"site_posts"` + EnableAPIChannelDeletion *bool + EnableLocalMode 
*bool `access:"cloud_restrictable"` + LocalModeSocketLocation *string `access:"cloud_restrictable"` // telemetry: none + EnableAWSMetering *bool // telemetry: none + SplitKey *string `access:"experimental_feature_flags,write_restrictable"` // telemetry: none + FeatureFlagSyncIntervalSeconds *int `access:"experimental_feature_flags,write_restrictable"` // telemetry: none + DebugSplit *bool `access:"experimental_feature_flags,write_restrictable"` // telemetry: none + ThreadAutoFollow *bool `access:"experimental_features"` + CollapsedThreads *string `access:"experimental_features"` + ManagedResourcePaths *string `access:"environment_web_server,write_restrictable,cloud_restrictable"` + EnableCustomGroups *bool `access:"site_users_and_teams"` + AllowSyncedDrafts *bool `access:"site_posts"` + UniqueEmojiReactionLimitPerPost *int `access:"site_posts"` + RefreshPostStatsRunTime *string `access:"site_users_and_teams"` + MaximumPayloadSizeBytes *int64 `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + MaximumURLLength *int `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + ScheduledPosts *bool `access:"site_posts"` + EnableWebHubChannelIteration *bool `access:"write_restrictable,cloud_restrictable"` // telemetry: none + FrameAncestors *string `access:"write_restrictable,cloud_restrictable"` // telemetry: none + DeleteAccountLink *string `access:"site_users_and_teams,write_restrictable,cloud_restrictable"` +} + +var MattermostGiphySdkKey string + +func (s *ServiceSettings) SetDefaults(isUpdate bool) { + if s.EnableEmailInvitations == nil { + // If the site URL is also not present then assume this is a clean install + if s.SiteURL == nil { + s.EnableEmailInvitations = NewPointer(false) + } else { + s.EnableEmailInvitations = NewPointer(true) + } + } + + if s.SiteURL == nil { + if s.EnableDeveloper != nil && *s.EnableDeveloper { + s.SiteURL = NewPointer(ServiceSettingsDefaultSiteURL) + } else { + s.SiteURL = NewPointer("") + } + 
} + + if s.WebsocketURL == nil { + s.WebsocketURL = NewPointer("") + } + + if s.LicenseFileLocation == nil { + s.LicenseFileLocation = NewPointer("") + } + + if s.ListenAddress == nil { + s.ListenAddress = NewPointer(ServiceSettingsDefaultListenAndAddress) + } + + if s.EnableLinkPreviews == nil { + s.EnableLinkPreviews = NewPointer(true) + } + + if s.EnablePermalinkPreviews == nil { + s.EnablePermalinkPreviews = NewPointer(true) + } + + if s.RestrictLinkPreviews == nil { + s.RestrictLinkPreviews = NewPointer("") + } + + if s.EnableTesting == nil { + s.EnableTesting = NewPointer(false) + } + + if s.EnableDeveloper == nil { + s.EnableDeveloper = NewPointer(false) + } + + if s.DeveloperFlags == nil { + s.DeveloperFlags = NewPointer("") + } + + if s.EnableClientPerformanceDebugging == nil { + s.EnableClientPerformanceDebugging = NewPointer(false) + } + + if s.EnableSecurityFixAlert == nil { + s.EnableSecurityFixAlert = NewPointer(true) + } + + if s.EnableInsecureOutgoingConnections == nil { + s.EnableInsecureOutgoingConnections = NewPointer(false) + } + + if s.AllowedUntrustedInternalConnections == nil { + s.AllowedUntrustedInternalConnections = NewPointer("") + } + + if s.EnableMultifactorAuthentication == nil { + s.EnableMultifactorAuthentication = NewPointer(false) + } + + if s.EnforceMultifactorAuthentication == nil { + s.EnforceMultifactorAuthentication = NewPointer(false) + } + + if s.EnableUserAccessTokens == nil { + s.EnableUserAccessTokens = NewPointer(false) + } + + if s.GoroutineHealthThreshold == nil { + s.GoroutineHealthThreshold = NewPointer(-1) + } + + if s.GoogleDeveloperKey == nil { + s.GoogleDeveloperKey = NewPointer("") + } + + if s.EnableOAuthServiceProvider == nil { + s.EnableOAuthServiceProvider = NewPointer(true) + } + + if s.EnableDynamicClientRegistration == nil { + s.EnableDynamicClientRegistration = NewPointer(false) + } + + if s.EnableIncomingWebhooks == nil { + s.EnableIncomingWebhooks = NewPointer(true) + } + + if s.EnableOutgoingWebhooks 
== nil { + s.EnableOutgoingWebhooks = NewPointer(true) + } + + if s.EnableOutgoingOAuthConnections == nil { + s.EnableOutgoingOAuthConnections = NewPointer(false) + } + + if s.OutgoingIntegrationRequestsTimeout == nil { + s.OutgoingIntegrationRequestsTimeout = NewPointer(int64(OutgoingIntegrationRequestsDefaultTimeout)) + } + + if s.ConnectionSecurity == nil { + s.ConnectionSecurity = NewPointer("") + } + + if s.TLSKeyFile == nil { + s.TLSKeyFile = NewPointer(ServiceSettingsDefaultTLSKeyFile) + } + + if s.TLSCertFile == nil { + s.TLSCertFile = NewPointer(ServiceSettingsDefaultTLSCertFile) + } + + if s.TLSMinVer == nil { + s.TLSMinVer = NewPointer("1.2") + } + + if s.TLSStrictTransport == nil { + s.TLSStrictTransport = NewPointer(false) + } + + if s.TLSStrictTransportMaxAge == nil { + s.TLSStrictTransportMaxAge = NewPointer(int64(63072000)) + } + + if s.TLSOverwriteCiphers == nil { + s.TLSOverwriteCiphers = []string{} + } + + if s.UseLetsEncrypt == nil { + s.UseLetsEncrypt = NewPointer(false) + } + + if s.LetsEncryptCertificateCacheFile == nil { + s.LetsEncryptCertificateCacheFile = NewPointer("./config/letsencrypt.cache") + } + + if s.ReadTimeout == nil { + s.ReadTimeout = NewPointer(ServiceSettingsDefaultReadTimeout) + } + + if s.WriteTimeout == nil { + s.WriteTimeout = NewPointer(ServiceSettingsDefaultWriteTimeout) + } + + if s.IdleTimeout == nil { + s.IdleTimeout = NewPointer(ServiceSettingsDefaultIdleTimeout) + } + + if s.MaximumLoginAttempts == nil { + s.MaximumLoginAttempts = NewPointer(ServiceSettingsDefaultMaxLoginAttempts) + } + + if s.Forward80To443 == nil { + s.Forward80To443 = NewPointer(false) + } + + if s.TrustedProxyIPHeader == nil { + s.TrustedProxyIPHeader = []string{} + } + + if s.TimeBetweenUserTypingUpdatesMilliseconds == nil { + s.TimeBetweenUserTypingUpdatesMilliseconds = NewPointer(int64(5000)) + } + + if s.EnableCrossTeamSearch == nil { + s.EnableCrossTeamSearch = NewPointer(true) + } + + if s.EnablePostSearch == nil { + s.EnablePostSearch = 
NewPointer(true) + } + + if s.EnableFileSearch == nil { + s.EnableFileSearch = NewPointer(true) + } + + if s.MinimumHashtagLength == nil { + s.MinimumHashtagLength = NewPointer(3) + } + + if s.EnableUserTypingMessages == nil { + s.EnableUserTypingMessages = NewPointer(true) + } + + if s.EnableChannelViewedMessages == nil { + s.EnableChannelViewedMessages = NewPointer(true) + } + + if s.EnableUserStatuses == nil { + s.EnableUserStatuses = NewPointer(true) + } + + if s.ClusterLogTimeoutMilliseconds == nil { + s.ClusterLogTimeoutMilliseconds = NewPointer(2000) + } + + if s.EnableTutorial == nil { + s.EnableTutorial = NewPointer(true) + } + + if s.EnableOnboardingFlow == nil { + s.EnableOnboardingFlow = NewPointer(true) + } + + // Must be manually enabled for existing installations. + if s.ExtendSessionLengthWithActivity == nil { + s.ExtendSessionLengthWithActivity = NewPointer(!isUpdate) + } + + // Must be manually enabled for existing installations. + if s.TerminateSessionsOnPasswordChange == nil { + s.TerminateSessionsOnPasswordChange = NewPointer(!isUpdate) + } + + if s.SessionLengthWebInDays == nil { + if isUpdate { + s.SessionLengthWebInDays = NewPointer(180) + } else { + s.SessionLengthWebInDays = NewPointer(30) + } + } + + if s.SessionLengthWebInHours == nil { + var webTTLDays int + if s.SessionLengthWebInDays == nil { + if isUpdate { + webTTLDays = 180 + } else { + webTTLDays = 30 + } + } else { + webTTLDays = *s.SessionLengthWebInDays + } + s.SessionLengthWebInHours = NewPointer(webTTLDays * 24) + } + + if s.SessionLengthMobileInDays == nil { + if isUpdate { + s.SessionLengthMobileInDays = NewPointer(180) + } else { + s.SessionLengthMobileInDays = NewPointer(30) + } + } + + if s.SessionLengthMobileInHours == nil { + var mobileTTLDays int + if s.SessionLengthMobileInDays == nil { + if isUpdate { + mobileTTLDays = 180 + } else { + mobileTTLDays = 30 + } + } else { + mobileTTLDays = *s.SessionLengthMobileInDays + } + s.SessionLengthMobileInHours = 
NewPointer(mobileTTLDays * 24) + } + + if s.SessionLengthSSOInDays == nil { + s.SessionLengthSSOInDays = NewPointer(30) + } + + if s.SessionLengthSSOInHours == nil { + var ssoTTLDays int + if s.SessionLengthSSOInDays == nil { + ssoTTLDays = 30 + } else { + ssoTTLDays = *s.SessionLengthSSOInDays + } + s.SessionLengthSSOInHours = NewPointer(ssoTTLDays * 24) + } + + if s.SessionCacheInMinutes == nil { + s.SessionCacheInMinutes = NewPointer(10) + } + + if s.SessionIdleTimeoutInMinutes == nil { + s.SessionIdleTimeoutInMinutes = NewPointer(43200) + } + + if s.EnableCommands == nil { + s.EnableCommands = NewPointer(true) + } + + if s.EnablePostUsernameOverride == nil { + s.EnablePostUsernameOverride = NewPointer(false) + } + + if s.EnablePostIconOverride == nil { + s.EnablePostIconOverride = NewPointer(false) + } + + if s.WebsocketPort == nil { + s.WebsocketPort = NewPointer(80) + } + + if s.WebsocketSecurePort == nil { + s.WebsocketSecurePort = NewPointer(443) + } + + if s.AllowCorsFrom == nil { + s.AllowCorsFrom = NewPointer(ServiceSettingsDefaultAllowCorsFrom) + } + + if s.CorsExposedHeaders == nil { + s.CorsExposedHeaders = NewPointer("") + } + + if s.CorsAllowCredentials == nil { + s.CorsAllowCredentials = NewPointer(false) + } + + if s.CorsDebug == nil { + s.CorsDebug = NewPointer(false) + } + + if s.AllowCookiesForSubdomains == nil { + s.AllowCookiesForSubdomains = NewPointer(false) + } + + if s.WebserverMode == nil { + s.WebserverMode = NewPointer("gzip") + } else if *s.WebserverMode == "regular" { + *s.WebserverMode = "gzip" + } + + if s.EnableCustomEmoji == nil { + s.EnableCustomEmoji = NewPointer(true) + } + + if s.EnableEmojiPicker == nil { + s.EnableEmojiPicker = NewPointer(true) + } + + if s.EnableGifPicker == nil { + s.EnableGifPicker = NewPointer(true) + } + + if s.GiphySdkKey == nil || *s.GiphySdkKey == "" { + s.GiphySdkKey = NewPointer("") + } + + if s.ExperimentalEnableAuthenticationTransfer == nil { + s.ExperimentalEnableAuthenticationTransfer = 
NewPointer(true) + } + + if s.PostEditTimeLimit == nil { + s.PostEditTimeLimit = NewPointer(-1) + } + + if s.ExperimentalEnableDefaultChannelLeaveJoinMessages == nil { + s.ExperimentalEnableDefaultChannelLeaveJoinMessages = NewPointer(true) + } + + if s.ExperimentalGroupUnreadChannels == nil { + s.ExperimentalGroupUnreadChannels = NewPointer(GroupUnreadChannelsDisabled) + } else if *s.ExperimentalGroupUnreadChannels == "0" { + s.ExperimentalGroupUnreadChannels = NewPointer(GroupUnreadChannelsDisabled) + } else if *s.ExperimentalGroupUnreadChannels == "1" { + s.ExperimentalGroupUnreadChannels = NewPointer(GroupUnreadChannelsDefaultOn) + } + + if s.EnableAPITeamDeletion == nil { + s.EnableAPITeamDeletion = NewPointer(false) + } + + if s.EnableAPITriggerAdminNotifications == nil { + s.EnableAPITriggerAdminNotifications = NewPointer(false) + } + + if s.EnableAPIUserDeletion == nil { + s.EnableAPIUserDeletion = NewPointer(false) + } + + if s.EnableAPIPostDeletion == nil { + s.EnableAPIPostDeletion = NewPointer(false) + } + + if s.EnableAPIChannelDeletion == nil { + s.EnableAPIChannelDeletion = NewPointer(false) + } + + if s.ExperimentalEnableHardenedMode == nil { + s.ExperimentalEnableHardenedMode = NewPointer(false) + } + + if s.ExperimentalStrictCSRFEnforcement == nil { + s.ExperimentalStrictCSRFEnforcement = NewPointer(false) + } + + if s.DisableBotsWhenOwnerIsDeactivated == nil { + s.DisableBotsWhenOwnerIsDeactivated = NewPointer(true) + } + + if s.EnableBotAccountCreation == nil { + s.EnableBotAccountCreation = NewPointer(false) + } + + if s.EnableDesktopLandingPage == nil { + s.EnableDesktopLandingPage = NewPointer(true) + } + + if s.EnableSVGs == nil { + if isUpdate { + s.EnableSVGs = NewPointer(true) + } else { + s.EnableSVGs = NewPointer(false) + } + } + + if s.EnableLatex == nil { + if isUpdate { + s.EnableLatex = NewPointer(true) + } else { + s.EnableLatex = NewPointer(false) + } + } + + if s.EnableInlineLatex == nil { + s.EnableInlineLatex = NewPointer(true) 
+ } + + if s.EnableLocalMode == nil { + s.EnableLocalMode = NewPointer(false) + } + + if s.LocalModeSocketLocation == nil { + s.LocalModeSocketLocation = NewPointer(LocalModeSocketPath) + } + + if s.EnableAWSMetering == nil { + s.EnableAWSMetering = NewPointer(false) + } + + if s.SplitKey == nil { + s.SplitKey = NewPointer("") + } + + if s.FeatureFlagSyncIntervalSeconds == nil { + s.FeatureFlagSyncIntervalSeconds = NewPointer(30) + } + + if s.DebugSplit == nil { + s.DebugSplit = NewPointer(false) + } + + if s.ThreadAutoFollow == nil { + s.ThreadAutoFollow = NewPointer(true) + } + + if s.CollapsedThreads == nil { + s.CollapsedThreads = NewPointer(CollapsedThreadsAlwaysOn) + } + + if s.ManagedResourcePaths == nil { + s.ManagedResourcePaths = NewPointer("") + } + + if s.EnableCustomGroups == nil { + s.EnableCustomGroups = NewPointer(true) + } + + if s.PostPriority == nil { + s.PostPriority = NewPointer(true) + } + + if s.AllowPersistentNotifications == nil { + s.AllowPersistentNotifications = NewPointer(true) + } + + if s.AllowPersistentNotificationsForGuests == nil { + s.AllowPersistentNotificationsForGuests = NewPointer(false) + } + + if s.PersistentNotificationIntervalMinutes == nil { + s.PersistentNotificationIntervalMinutes = NewPointer(5) + } + + if s.PersistentNotificationMaxCount == nil { + s.PersistentNotificationMaxCount = NewPointer(6) + } + + if s.PersistentNotificationMaxRecipients == nil { + s.PersistentNotificationMaxRecipients = NewPointer(5) + } + + if s.AllowSyncedDrafts == nil { + s.AllowSyncedDrafts = NewPointer(true) + } + + if s.UniqueEmojiReactionLimitPerPost == nil { + s.UniqueEmojiReactionLimitPerPost = NewPointer(ServiceSettingsDefaultUniqueReactionsPerPost) + } + + if *s.UniqueEmojiReactionLimitPerPost > ServiceSettingsMaxUniqueReactionsPerPost { + s.UniqueEmojiReactionLimitPerPost = NewPointer(ServiceSettingsMaxUniqueReactionsPerPost) + } + + if s.RefreshPostStatsRunTime == nil { + s.RefreshPostStatsRunTime = NewPointer("00:00") + } + + if 
s.MaximumPayloadSizeBytes == nil { + s.MaximumPayloadSizeBytes = NewPointer(int64(300000)) + } + + if s.MaximumURLLength == nil { + s.MaximumURLLength = NewPointer(ServiceSettingsDefaultMaxURLLength) + } + + if s.ScheduledPosts == nil { + s.ScheduledPosts = NewPointer(true) + } + + if s.EnableWebHubChannelIteration == nil { + s.EnableWebHubChannelIteration = NewPointer(false) + } + + if s.FrameAncestors == nil { + s.FrameAncestors = NewPointer("") + } + + if !isSafeLink(s.DeleteAccountLink) { + *s.DeleteAccountLink = "" + } + + if s.DeleteAccountLink == nil { + s.DeleteAccountLink = NewPointer("") + } +} + +type CacheSettings struct { + CacheType *string `access:",write_restrictable,cloud_restrictable"` + RedisAddress *string `access:",write_restrictable,cloud_restrictable"` // telemetry: none + RedisPassword *string `access:",write_restrictable,cloud_restrictable"` // telemetry: none + RedisDB *int `access:",write_restrictable,cloud_restrictable"` // telemetry: none + RedisCachePrefix *string `access:",write_restrictable,cloud_restrictable"` // telemetry: none + DisableClientCache *bool `access:",write_restrictable,cloud_restrictable"` // telemetry: none +} + +func (s *CacheSettings) SetDefaults() { + if s.CacheType == nil { + s.CacheType = NewPointer(CacheTypeLRU) + } + + if s.RedisAddress == nil { + s.RedisAddress = NewPointer("") + } + + if s.RedisPassword == nil { + s.RedisPassword = NewPointer("") + } + + if s.RedisDB == nil { + s.RedisDB = NewPointer(-1) + } + + if s.RedisCachePrefix == nil { + s.RedisCachePrefix = NewPointer("") + } + + if s.DisableClientCache == nil { + s.DisableClientCache = NewPointer(false) + } +} + +func (s *CacheSettings) isValid() *AppError { + if *s.CacheType != CacheTypeLRU && *s.CacheType != CacheTypeRedis { + return NewAppError("Config.IsValid", "model.config.is_valid.cache_type.app_error", nil, "", http.StatusBadRequest) + } + + if *s.CacheType == CacheTypeRedis && *s.RedisAddress == "" { + return NewAppError("Config.IsValid", 
"model.config.is_valid.empty_redis_address.app_error", nil, "", http.StatusBadRequest) + } + + if *s.CacheType == CacheTypeRedis && *s.RedisDB < 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.invalid_redis_db.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +type ClusterSettings struct { + Enable *bool `access:"environment_high_availability,write_restrictable"` + ClusterName *string `access:"environment_high_availability,write_restrictable,cloud_restrictable"` // telemetry: none + OverrideHostname *string `access:"environment_high_availability,write_restrictable,cloud_restrictable"` // telemetry: none + NetworkInterface *string `access:"environment_high_availability,write_restrictable,cloud_restrictable"` + BindAddress *string `access:"environment_high_availability,write_restrictable,cloud_restrictable"` + AdvertiseAddress *string `access:"environment_high_availability,write_restrictable,cloud_restrictable"` + UseIPAddress *bool `access:"environment_high_availability,write_restrictable,cloud_restrictable"` + EnableGossipCompression *bool `access:"environment_high_availability,write_restrictable,cloud_restrictable"` + // Deprecated: use EnableGossipEncryption + EnableExperimentalGossipEncryption *bool `json:",omitempty"` + EnableGossipEncryption *bool `access:"environment_high_availability,write_restrictable,cloud_restrictable"` + ReadOnlyConfig *bool `access:"environment_high_availability,write_restrictable,cloud_restrictable"` + GossipPort *int `access:"environment_high_availability,write_restrictable,cloud_restrictable"` // telemetry: none +} + +func (s *ClusterSettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.ClusterName == nil { + s.ClusterName = NewPointer("") + } + + if s.OverrideHostname == nil { + s.OverrideHostname = NewPointer("") + } + + if s.NetworkInterface == nil { + s.NetworkInterface = NewPointer("") + } + + if s.BindAddress == nil { + s.BindAddress = NewPointer("") + } 
+ + if s.AdvertiseAddress == nil { + s.AdvertiseAddress = NewPointer("") + } + + if s.UseIPAddress == nil { + s.UseIPAddress = NewPointer(true) + } + + if s.EnableGossipEncryption == nil { + if s.EnableExperimentalGossipEncryption != nil { + s.EnableGossipEncryption = NewPointer(*s.EnableExperimentalGossipEncryption) + } else { + s.EnableGossipEncryption = NewPointer(true) + } + } + + if s.EnableGossipCompression == nil { + s.EnableGossipCompression = NewPointer(true) + } + + if s.ReadOnlyConfig == nil { + s.ReadOnlyConfig = NewPointer(true) + } + + if s.GossipPort == nil { + s.GossipPort = NewPointer(8074) + } +} + +type MetricsSettings struct { + Enable *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` + BlockProfileRate *int `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` + ListenAddress *string `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` // telemetry: none + EnableClientMetrics *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` + EnableNotificationMetrics *bool `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` + ClientSideUserIds []string `access:"environment_performance_monitoring,write_restrictable,cloud_restrictable"` // telemetry: none +} + +func (s *MetricsSettings) SetDefaults() { + if s.ListenAddress == nil { + s.ListenAddress = NewPointer(":8067") + } + + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.BlockProfileRate == nil { + s.BlockProfileRate = NewPointer(0) + } + + if s.EnableClientMetrics == nil { + s.EnableClientMetrics = NewPointer(true) + } + + if s.EnableNotificationMetrics == nil { + s.EnableNotificationMetrics = NewPointer(true) + } + + if s.ClientSideUserIds == nil { + s.ClientSideUserIds = []string{} + } +} + +func (s *MetricsSettings) isValid() *AppError { + const maxLength = 5 + if len(s.ClientSideUserIds) > maxLength { + return 
NewAppError("MetricsSettings.IsValid", "model.config.is_valid.metrics_client_side_user_ids.app_error", map[string]any{"MaxLength": maxLength, "CurrentLength": len(s.ClientSideUserIds)}, "", http.StatusBadRequest) + } + for _, id := range s.ClientSideUserIds { + if !IsValidId(id) { + return NewAppError("MetricsSettings.IsValid", "model.config.is_valid.metrics_client_side_user_id.app_error", map[string]any{"Id": id}, "", http.StatusBadRequest) + } + } + return nil +} + +type ExperimentalSettings struct { + // Deprecated: This field is no longer in use, server will fail to start if enabled. + ClientSideCertEnable *bool `access:"experimental_features,cloud_restrictable"` + LinkMetadataTimeoutMilliseconds *int64 `access:"experimental_features,write_restrictable,cloud_restrictable"` + RestrictSystemAdmin *bool `access:"*_read,write_restrictable"` + EnableSharedChannels *bool `access:"experimental_features"` // Deprecated: use `ConnectedWorkspacesSettings.EnableSharedChannels` + EnableRemoteClusterService *bool `access:"experimental_features"` // Deprecated: use `ConnectedWorkspacesSettings.EnableRemoteClusterService` + DisableAppBar *bool `access:"experimental_features"` + DisableRefetchingOnBrowserFocus *bool `access:"experimental_features"` + DelayChannelAutocomplete *bool `access:"experimental_features"` + DisableWakeUpReconnectHandler *bool `access:"experimental_features"` + UsersStatusAndProfileFetchingPollIntervalMilliseconds *int64 `access:"experimental_features"` + YoutubeReferrerPolicy *bool `access:"experimental_features"` + ExperimentalChannelCategorySorting *bool `access:"experimental_features"` +} + +func (s *ExperimentalSettings) SetDefaults() { + if s.ClientSideCertEnable == nil { + s.ClientSideCertEnable = NewPointer(false) + } + + if s.LinkMetadataTimeoutMilliseconds == nil { + s.LinkMetadataTimeoutMilliseconds = NewPointer(int64(ExperimentalSettingsDefaultLinkMetadataTimeoutMilliseconds)) + } + + if s.RestrictSystemAdmin == nil { + s.RestrictSystemAdmin 
= NewPointer(false) + } + + if s.EnableSharedChannels == nil { + s.EnableSharedChannels = NewPointer(false) + } + + if s.EnableRemoteClusterService == nil { + s.EnableRemoteClusterService = NewPointer(false) + } + + if s.DisableAppBar == nil { + s.DisableAppBar = NewPointer(false) + } + + if s.DisableRefetchingOnBrowserFocus == nil { + s.DisableRefetchingOnBrowserFocus = NewPointer(false) + } + + if s.DelayChannelAutocomplete == nil { + s.DelayChannelAutocomplete = NewPointer(false) + } + + if s.DisableWakeUpReconnectHandler == nil { + s.DisableWakeUpReconnectHandler = NewPointer(false) + } + + if s.UsersStatusAndProfileFetchingPollIntervalMilliseconds == nil { + s.UsersStatusAndProfileFetchingPollIntervalMilliseconds = NewPointer(int64(ExperimentalSettingsDefaultUsersStatusAndProfileFetchingPollIntervalMilliseconds)) + } + + if s.YoutubeReferrerPolicy == nil { + s.YoutubeReferrerPolicy = NewPointer(false) + } + + if s.ExperimentalChannelCategorySorting == nil { + s.ExperimentalChannelCategorySorting = NewPointer(false) + } +} + +type AnalyticsSettings struct { + MaxUsersForStatistics *int `access:"write_restrictable,cloud_restrictable"` +} + +func (s *AnalyticsSettings) SetDefaults() { + if s.MaxUsersForStatistics == nil { + s.MaxUsersForStatistics = NewPointer(AnalyticsSettingsDefaultMaxUsersForStatistics) + } +} + +type SSOSettings struct { + Enable *bool `access:"authentication_openid"` + Secret *string `access:"authentication_openid"` // telemetry: none + Id *string `access:"authentication_openid"` // telemetry: none + Scope *string `access:"authentication_openid"` // telemetry: none + AuthEndpoint *string `access:"authentication_openid"` // telemetry: none + TokenEndpoint *string `access:"authentication_openid"` // telemetry: none + UserAPIEndpoint *string `access:"authentication_openid"` // telemetry: none + DiscoveryEndpoint *string `access:"authentication_openid"` // telemetry: none + ButtonText *string `access:"authentication_openid"` // telemetry: none + 
ButtonColor *string `access:"authentication_openid"` // telemetry: none +} + +func (s *SSOSettings) setDefaults(scope, authEndpoint, tokenEndpoint, userAPIEndpoint, buttonColor string) { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.Secret == nil { + s.Secret = NewPointer("") + } + + if s.Id == nil { + s.Id = NewPointer("") + } + + if s.Scope == nil { + s.Scope = NewPointer(scope) + } + + if s.DiscoveryEndpoint == nil { + s.DiscoveryEndpoint = NewPointer("") + } + + if s.AuthEndpoint == nil { + s.AuthEndpoint = NewPointer(authEndpoint) + } + + if s.TokenEndpoint == nil { + s.TokenEndpoint = NewPointer(tokenEndpoint) + } + + if s.UserAPIEndpoint == nil { + s.UserAPIEndpoint = NewPointer(userAPIEndpoint) + } + + if s.ButtonText == nil { + s.ButtonText = NewPointer("") + } + + if s.ButtonColor == nil { + s.ButtonColor = NewPointer(buttonColor) + } +} + +type Office365Settings struct { + Enable *bool `access:"authentication_openid"` + Secret *string `access:"authentication_openid"` // telemetry: none + Id *string `access:"authentication_openid"` // telemetry: none + Scope *string `access:"authentication_openid"` + AuthEndpoint *string `access:"authentication_openid"` // telemetry: none + TokenEndpoint *string `access:"authentication_openid"` // telemetry: none + UserAPIEndpoint *string `access:"authentication_openid"` // telemetry: none + DiscoveryEndpoint *string `access:"authentication_openid"` // telemetry: none + DirectoryId *string `access:"authentication_openid"` // telemetry: none +} + +func (s *Office365Settings) setDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.Id == nil { + s.Id = NewPointer("") + } + + if s.Secret == nil { + s.Secret = NewPointer("") + } + + if s.Scope == nil { + s.Scope = NewPointer(Office365SettingsDefaultScope) + } + + if s.DiscoveryEndpoint == nil { + s.DiscoveryEndpoint = NewPointer("") + } + + if s.AuthEndpoint == nil { + s.AuthEndpoint = 
NewPointer(Office365SettingsDefaultAuthEndpoint) + } + + if s.TokenEndpoint == nil { + s.TokenEndpoint = NewPointer(Office365SettingsDefaultTokenEndpoint) + } + + if s.UserAPIEndpoint == nil { + s.UserAPIEndpoint = NewPointer(Office365SettingsDefaultUserAPIEndpoint) + } + + if s.DirectoryId == nil { + s.DirectoryId = NewPointer("") + } +} + +func (s *Office365Settings) SSOSettings() *SSOSettings { + ssoSettings := SSOSettings{} + ssoSettings.Enable = s.Enable + ssoSettings.Secret = s.Secret + ssoSettings.Id = s.Id + ssoSettings.Scope = s.Scope + ssoSettings.DiscoveryEndpoint = s.DiscoveryEndpoint + ssoSettings.AuthEndpoint = s.AuthEndpoint + ssoSettings.TokenEndpoint = s.TokenEndpoint + ssoSettings.UserAPIEndpoint = s.UserAPIEndpoint + return &ssoSettings +} + +type ReplicaLagSettings struct { + DataSource *string `access:"environment,write_restrictable,cloud_restrictable"` // telemetry: none + QueryAbsoluteLag *string `access:"environment,write_restrictable,cloud_restrictable"` // telemetry: none + QueryTimeLag *string `access:"environment,write_restrictable,cloud_restrictable"` // telemetry: none +} + +type SqlSettings struct { + DriverName *string `access:"environment_database,write_restrictable,cloud_restrictable"` + DataSource *string `access:"environment_database,write_restrictable,cloud_restrictable"` // telemetry: none + DataSourceReplicas []string `access:"environment_database,write_restrictable,cloud_restrictable"` + DataSourceSearchReplicas []string `access:"environment_database,write_restrictable,cloud_restrictable"` + MaxIdleConns *int `access:"environment_database,write_restrictable,cloud_restrictable"` + ConnMaxLifetimeMilliseconds *int `access:"environment_database,write_restrictable,cloud_restrictable"` + ConnMaxIdleTimeMilliseconds *int `access:"environment_database,write_restrictable,cloud_restrictable"` + MaxOpenConns *int `access:"environment_database,write_restrictable,cloud_restrictable"` + Trace *bool 
`access:"environment_database,write_restrictable,cloud_restrictable"` + AtRestEncryptKey *string `access:"environment_database,write_restrictable,cloud_restrictable"` // telemetry: none + QueryTimeout *int `access:"environment_database,write_restrictable,cloud_restrictable"` + DisableDatabaseSearch *bool `access:"environment_database,write_restrictable,cloud_restrictable"` + MigrationsStatementTimeoutSeconds *int `access:"environment_database,write_restrictable,cloud_restrictable"` + ReplicaLagSettings []*ReplicaLagSettings `access:"environment_database,write_restrictable,cloud_restrictable"` // telemetry: none + ReplicaMonitorIntervalSeconds *int `access:"environment_database,write_restrictable,cloud_restrictable"` +} + +func (s *SqlSettings) SetDefaults(isUpdate bool) { + if s.DriverName == nil { + s.DriverName = NewPointer(DatabaseDriverPostgres) + } + + if s.DataSource == nil { + s.DataSource = NewPointer(SqlSettingsDefaultDataSource) + } + + if s.DataSourceReplicas == nil { + s.DataSourceReplicas = []string{} + } + + if s.DataSourceSearchReplicas == nil { + s.DataSourceSearchReplicas = []string{} + } + + if isUpdate { + // When updating an existing configuration, ensure an encryption key has been specified. + if s.AtRestEncryptKey == nil || *s.AtRestEncryptKey == "" { + s.AtRestEncryptKey = NewPointer(NewRandomString(32)) + } + } else { + // When generating a blank configuration, leave this key empty to be generated on server start. 
+ s.AtRestEncryptKey = NewPointer("") + } + + if s.MaxIdleConns == nil { + s.MaxIdleConns = NewPointer(50) + } + + if s.MaxOpenConns == nil { + s.MaxOpenConns = NewPointer(100) + } + + if s.ConnMaxLifetimeMilliseconds == nil { + s.ConnMaxLifetimeMilliseconds = NewPointer(3600000) + } + + if s.ConnMaxIdleTimeMilliseconds == nil { + s.ConnMaxIdleTimeMilliseconds = NewPointer(300000) + } + + if s.Trace == nil { + s.Trace = NewPointer(false) + } + + if s.QueryTimeout == nil { + s.QueryTimeout = NewPointer(30) + } + + if s.DisableDatabaseSearch == nil { + s.DisableDatabaseSearch = NewPointer(false) + } + + if s.MigrationsStatementTimeoutSeconds == nil { + s.MigrationsStatementTimeoutSeconds = NewPointer(100000) + } + + if s.ReplicaLagSettings == nil { + s.ReplicaLagSettings = []*ReplicaLagSettings{} + } + + if s.ReplicaMonitorIntervalSeconds == nil { + s.ReplicaMonitorIntervalSeconds = NewPointer(5) + } +} + +type LogSettings struct { + EnableConsole *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` + ConsoleLevel *string `access:"environment_logging,write_restrictable,cloud_restrictable"` + ConsoleJson *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` + EnableColor *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` // telemetry: none + EnableFile *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` + FileLevel *string `access:"environment_logging,write_restrictable,cloud_restrictable"` + FileJson *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` + FileLocation *string `access:"environment_logging,write_restrictable,cloud_restrictable"` + EnableWebhookDebugging *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` + EnableDiagnostics *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` // telemetry: none + EnableSentry *bool `access:"environment_logging,write_restrictable,cloud_restrictable"` // telemetry: 
none + AdvancedLoggingJSON json.RawMessage `access:"environment_logging,write_restrictable,cloud_restrictable"` + MaxFieldSize *int `access:"environment_logging,write_restrictable,cloud_restrictable"` +} + +func NewLogSettings() *LogSettings { + settings := &LogSettings{} + settings.SetDefaults() + return settings +} + +func (s *LogSettings) isValid() *AppError { + cfg := make(mlog.LoggerConfiguration) + err := json.Unmarshal(s.AdvancedLoggingJSON, &cfg) + if err != nil { + return NewAppError("LogSettings.isValid", "model.config.is_valid.log.advanced_logging.json", map[string]any{"Error": err}, "", http.StatusBadRequest).Wrap(err) + } + + err = cfg.IsValid() + if err != nil { + return NewAppError("LogSettings.isValid", "model.config.is_valid.log.advanced_logging.parse", map[string]any{"Error": err}, "", http.StatusBadRequest).Wrap(err) + } + + return nil +} + +func (s *LogSettings) SetDefaults() { + if s.EnableConsole == nil { + s.EnableConsole = NewPointer(true) + } + + if s.ConsoleLevel == nil { + s.ConsoleLevel = NewPointer("DEBUG") + } + + if s.EnableColor == nil { + s.EnableColor = NewPointer(false) + } + + if s.EnableFile == nil { + s.EnableFile = NewPointer(true) + } + + if s.FileLevel == nil { + s.FileLevel = NewPointer("INFO") + } + + if s.FileLocation == nil { + s.FileLocation = NewPointer("") + } + + if s.EnableWebhookDebugging == nil { + s.EnableWebhookDebugging = NewPointer(true) + } + + if s.EnableDiagnostics == nil { + s.EnableDiagnostics = NewPointer(true) + } + + if s.EnableSentry == nil { + s.EnableSentry = NewPointer(*s.EnableDiagnostics) + } + + if s.ConsoleJson == nil { + s.ConsoleJson = NewPointer(true) + } + + if s.FileJson == nil { + s.FileJson = NewPointer(true) + } + + if utils.IsEmptyJSON(s.AdvancedLoggingJSON) { + s.AdvancedLoggingJSON = []byte("{}") + } + + if s.MaxFieldSize == nil { + s.MaxFieldSize = NewPointer(2048) + } +} + +// GetAdvancedLoggingConfig returns the advanced logging config as a []byte. 
+func (s *LogSettings) GetAdvancedLoggingConfig() []byte { + if !utils.IsEmptyJSON(s.AdvancedLoggingJSON) { + return s.AdvancedLoggingJSON + } + + return []byte("{}") +} + +type ExperimentalAuditSettings struct { + FileEnabled *bool `access:"experimental_features,write_restrictable,cloud_restrictable"` + FileName *string `access:"experimental_features,write_restrictable,cloud_restrictable"` // telemetry: none + FileMaxSizeMB *int `access:"experimental_features,write_restrictable,cloud_restrictable"` + FileMaxAgeDays *int `access:"experimental_features,write_restrictable,cloud_restrictable"` + FileMaxBackups *int `access:"experimental_features,write_restrictable,cloud_restrictable"` + FileCompress *bool `access:"experimental_features,write_restrictable,cloud_restrictable"` + FileMaxQueueSize *int `access:"experimental_features,write_restrictable,cloud_restrictable"` + AdvancedLoggingJSON json.RawMessage `access:"experimental_features"` + Certificate *string `access:"experimental_features"` // telemetry: none +} + +func (s *ExperimentalAuditSettings) isValid() *AppError { + if *s.FileEnabled { + if *s.FileName == "" { + return NewAppError("ExperimentalAuditSettings.isValid", "model.config.is_valid.experimental_audit_settings.file_name_empty", nil, "", http.StatusBadRequest) + } + + if strings.HasSuffix(*s.FileName, `\`) { + return NewAppError("ExperimentalAuditSettings.isValid", "model.config.is_valid.experimental_audit_settings.file_name_is_directory", nil, "", http.StatusBadRequest) + } + + if *s.FileMaxSizeMB <= 0 { + return NewAppError("ExperimentalAuditSettings.isValid", "model.config.is_valid.experimental_audit_settings.file_max_size_invalid", nil, "", http.StatusBadRequest) + } + + if *s.FileMaxAgeDays < 0 { + return NewAppError("ExperimentalAuditSettings.isValid", "model.config.is_valid.experimental_audit_settings.file_max_age_invalid", nil, "", http.StatusBadRequest) + } + + if *s.FileMaxBackups < 0 { + return NewAppError("ExperimentalAuditSettings.isValid", 
"model.config.is_valid.experimental_audit_settings.file_max_backups_invalid", nil, "", http.StatusBadRequest) + } + + if *s.FileMaxQueueSize <= 0 { + return NewAppError("ExperimentalAuditSettings.isValid", "model.config.is_valid.experimental_audit_settings.file_max_queue_size_invalid", nil, "", http.StatusBadRequest) + } + } + + cfg := make(mlog.LoggerConfiguration) + err := json.Unmarshal(s.AdvancedLoggingJSON, &cfg) + if err != nil { + return NewAppError("ExperimentalAuditSettings.isValid", "model.config.is_valid.log.advanced_logging.json", map[string]any{"Error": err}, "", http.StatusBadRequest).Wrap(err) + } + + err = cfg.IsValid() + if err != nil { + return NewAppError("ExperimentalAuditSettings.isValid", "model.config.is_valid.log.advanced_logging.parse", map[string]any{"Error": err}, "", http.StatusBadRequest).Wrap(err) + } + + return nil +} + +func (s *ExperimentalAuditSettings) SetDefaults() { + if s.FileEnabled == nil { + s.FileEnabled = NewPointer(false) + } + + if s.FileName == nil { + s.FileName = NewPointer("") + } + + if s.FileMaxSizeMB == nil { + s.FileMaxSizeMB = NewPointer(100) + } + + if s.FileMaxAgeDays == nil { + s.FileMaxAgeDays = NewPointer(0) // no limit on age + } + + if s.FileMaxBackups == nil { // no limit on number of backups + s.FileMaxBackups = NewPointer(0) + } + + if s.FileCompress == nil { + s.FileCompress = NewPointer(false) + } + + if s.FileMaxQueueSize == nil { + s.FileMaxQueueSize = NewPointer(1000) + } + + if utils.IsEmptyJSON(s.AdvancedLoggingJSON) { + s.AdvancedLoggingJSON = []byte("{}") + } + + if s.Certificate == nil { + s.Certificate = NewPointer("") + } +} + +// GetAdvancedLoggingConfig returns the advanced logging config as a []byte. 
+func (s *ExperimentalAuditSettings) GetAdvancedLoggingConfig() []byte { + if !utils.IsEmptyJSON(s.AdvancedLoggingJSON) { + return s.AdvancedLoggingJSON + } + + return []byte("{}") +} + +type PasswordSettings struct { + MinimumLength *int `access:"authentication_password"` + Lowercase *bool `access:"authentication_password"` + Number *bool `access:"authentication_password"` + Uppercase *bool `access:"authentication_password"` + Symbol *bool `access:"authentication_password"` + EnableForgotLink *bool `access:"authentication_password"` +} + +func (s *PasswordSettings) SetDefaults() { + if s.MinimumLength == nil { + s.MinimumLength = NewPointer(8) + } + + if s.Lowercase == nil { + s.Lowercase = NewPointer(false) + } + + if s.Number == nil { + s.Number = NewPointer(false) + } + + if s.Uppercase == nil { + s.Uppercase = NewPointer(false) + } + + if s.Symbol == nil { + s.Symbol = NewPointer(false) + } + + if s.EnableForgotLink == nil { + s.EnableForgotLink = NewPointer(true) + } +} + +type FileSettings struct { + EnableFileAttachments *bool `access:"site_file_sharing_and_downloads"` + EnableMobileUpload *bool `access:"site_file_sharing_and_downloads"` + EnableMobileDownload *bool `access:"site_file_sharing_and_downloads"` + MaxFileSize *int64 `access:"environment_file_storage,cloud_restrictable"` + MaxImageResolution *int64 `access:"environment_file_storage,cloud_restrictable"` + MaxImageDecoderConcurrency *int64 `access:"environment_file_storage,cloud_restrictable"` + DriverName *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + Directory *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + EnablePublicLink *bool `access:"site_public_links,cloud_restrictable"` + ExtractContent *bool `access:"environment_file_storage,write_restrictable"` + ArchiveRecursion *bool `access:"environment_file_storage,write_restrictable"` + PublicLinkSalt *string `access:"site_public_links,cloud_restrictable"` // telemetry: none + 
InitialFont *string `access:"environment_file_storage,cloud_restrictable"` // telemetry: none + AmazonS3AccessKeyId *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3SecretAccessKey *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3Bucket *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3PathPrefix *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3Region *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3Endpoint *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3SSL *bool `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + AmazonS3SignV2 *bool `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + AmazonS3SSE *bool `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + AmazonS3Trace *bool `access:"environment_file_storage,write_restrictable,cloud_restrictable"` + AmazonS3RequestTimeoutMilliseconds *int64 `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3UploadPartSizeBytes *int64 `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + AmazonS3StorageClass *string `access:"environment_file_storage,write_restrictable,cloud_restrictable"` // telemetry: none + // Export store settings + DedicatedExportStore *bool `access:"environment_file_storage,write_restrictable"` + ExportDriverName *string `access:"environment_file_storage,write_restrictable"` + ExportDirectory *string `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3AccessKeyId *string `access:"environment_file_storage,write_restrictable"` // telemetry: none + 
ExportAmazonS3SecretAccessKey *string `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3Bucket *string `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3PathPrefix *string `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3Region *string `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3Endpoint *string `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3SSL *bool `access:"environment_file_storage,write_restrictable"` + ExportAmazonS3SignV2 *bool `access:"environment_file_storage,write_restrictable"` + ExportAmazonS3SSE *bool `access:"environment_file_storage,write_restrictable"` + ExportAmazonS3Trace *bool `access:"environment_file_storage,write_restrictable"` + ExportAmazonS3RequestTimeoutMilliseconds *int64 `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3PresignExpiresSeconds *int64 `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3UploadPartSizeBytes *int64 `access:"environment_file_storage,write_restrictable"` // telemetry: none + ExportAmazonS3StorageClass *string `access:"environment_file_storage,write_restrictable"` // telemetry: none +} + +func (s *FileSettings) SetDefaults(isUpdate bool) { + if s.EnableFileAttachments == nil { + s.EnableFileAttachments = NewPointer(true) + } + + if s.EnableMobileUpload == nil { + s.EnableMobileUpload = NewPointer(true) + } + + if s.EnableMobileDownload == nil { + s.EnableMobileDownload = NewPointer(true) + } + + if s.MaxFileSize == nil { + s.MaxFileSize = NewPointer(int64(100 * 1024 * 1024)) // 100MB (IEC) + } + + if s.MaxImageResolution == nil { + s.MaxImageResolution = NewPointer(int64(7680 * 4320)) // 8K, ~33MPX + } + + if s.MaxImageDecoderConcurrency == nil { + s.MaxImageDecoderConcurrency = NewPointer(int64(-1)) // Default to NumCPU + } + + if 
s.DriverName == nil { + s.DriverName = NewPointer(ImageDriverLocal) + } + + if s.Directory == nil || *s.Directory == "" { + s.Directory = NewPointer(FileSettingsDefaultDirectory) + } + + if s.EnablePublicLink == nil { + s.EnablePublicLink = NewPointer(false) + } + + if s.ExtractContent == nil { + s.ExtractContent = NewPointer(true) + } + + if s.ArchiveRecursion == nil { + s.ArchiveRecursion = NewPointer(false) + } + + if isUpdate { + // When updating an existing configuration, ensure link salt has been specified. + if s.PublicLinkSalt == nil || *s.PublicLinkSalt == "" { + s.PublicLinkSalt = NewPointer(NewRandomString(32)) + } + } else { + // When generating a blank configuration, leave link salt empty to be generated on server start. + s.PublicLinkSalt = NewPointer("") + } + + if s.InitialFont == nil { + // Defaults to "nunito-bold.ttf" + s.InitialFont = NewPointer("nunito-bold.ttf") + } + + if s.AmazonS3AccessKeyId == nil { + s.AmazonS3AccessKeyId = NewPointer("") + } + + if s.AmazonS3SecretAccessKey == nil { + s.AmazonS3SecretAccessKey = NewPointer("") + } + + if s.AmazonS3Bucket == nil { + s.AmazonS3Bucket = NewPointer("") + } + + if s.AmazonS3PathPrefix == nil { + s.AmazonS3PathPrefix = NewPointer("") + } + + if s.AmazonS3Region == nil { + s.AmazonS3Region = NewPointer("") + } + + if s.AmazonS3Endpoint == nil || *s.AmazonS3Endpoint == "" { + // Defaults to "s3.amazonaws.com" + s.AmazonS3Endpoint = NewPointer("s3.amazonaws.com") + } + + if s.AmazonS3SSL == nil { + s.AmazonS3SSL = NewPointer(true) // Secure by default. + } + + if s.AmazonS3SignV2 == nil { + s.AmazonS3SignV2 = new(bool) + // Signature v2 is not enabled by default. + } + + if s.AmazonS3SSE == nil { + s.AmazonS3SSE = NewPointer(false) // Not Encrypted by default. 
+ } + + if s.AmazonS3Trace == nil { + s.AmazonS3Trace = NewPointer(false) + } + + if s.AmazonS3RequestTimeoutMilliseconds == nil { + s.AmazonS3RequestTimeoutMilliseconds = NewPointer(int64(30000)) + } + + if s.AmazonS3UploadPartSizeBytes == nil { + s.AmazonS3UploadPartSizeBytes = NewPointer(int64(FileSettingsDefaultS3UploadPartSizeBytes)) + } + + if s.AmazonS3StorageClass == nil { + s.AmazonS3StorageClass = NewPointer("") + } + + if s.DedicatedExportStore == nil { + s.DedicatedExportStore = NewPointer(false) + } + + if s.ExportDriverName == nil { + s.ExportDriverName = NewPointer(ImageDriverLocal) + } + + if s.ExportDirectory == nil || *s.ExportDirectory == "" { + s.ExportDirectory = NewPointer(FileSettingsDefaultDirectory) + } + + if s.ExportAmazonS3AccessKeyId == nil { + s.ExportAmazonS3AccessKeyId = NewPointer("") + } + + if s.ExportAmazonS3SecretAccessKey == nil { + s.ExportAmazonS3SecretAccessKey = NewPointer("") + } + + if s.ExportAmazonS3Bucket == nil { + s.ExportAmazonS3Bucket = NewPointer("") + } + + if s.ExportAmazonS3PathPrefix == nil { + s.ExportAmazonS3PathPrefix = NewPointer("") + } + + if s.ExportAmazonS3Region == nil { + s.ExportAmazonS3Region = NewPointer("") + } + + if s.ExportAmazonS3Endpoint == nil || *s.ExportAmazonS3Endpoint == "" { + // Defaults to "s3.amazonaws.com" + s.ExportAmazonS3Endpoint = NewPointer("s3.amazonaws.com") + } + + if s.ExportAmazonS3SSL == nil { + s.ExportAmazonS3SSL = NewPointer(true) // Secure by default. + } + + if s.ExportAmazonS3SignV2 == nil { + s.ExportAmazonS3SignV2 = new(bool) + // Signature v2 is not enabled by default. + } + + if s.ExportAmazonS3SSE == nil { + s.ExportAmazonS3SSE = NewPointer(false) // Not Encrypted by default. 
+ } + + if s.ExportAmazonS3Trace == nil { + s.ExportAmazonS3Trace = NewPointer(false) + } + + if s.ExportAmazonS3RequestTimeoutMilliseconds == nil { + s.ExportAmazonS3RequestTimeoutMilliseconds = NewPointer(int64(30000)) + } + + if s.ExportAmazonS3PresignExpiresSeconds == nil { + s.ExportAmazonS3PresignExpiresSeconds = NewPointer(int64(21600)) // 6h + } + + if s.ExportAmazonS3UploadPartSizeBytes == nil { + s.ExportAmazonS3UploadPartSizeBytes = NewPointer(int64(FileSettingsDefaultS3ExportUploadPartSizeBytes)) + } + + if s.ExportAmazonS3StorageClass == nil { + s.ExportAmazonS3StorageClass = NewPointer("") + } +} + +type EmailSettings struct { + EnableSignUpWithEmail *bool `access:"authentication_email"` + EnableSignInWithEmail *bool `access:"authentication_email"` + EnableSignInWithUsername *bool `access:"authentication_email"` + SendEmailNotifications *bool `access:"site_notifications"` + UseChannelInEmailNotifications *bool `access:"experimental_features"` + RequireEmailVerification *bool `access:"authentication_email"` + FeedbackName *string `access:"site_notifications"` + FeedbackEmail *string `access:"site_notifications,cloud_restrictable"` + ReplyToAddress *string `access:"site_notifications,cloud_restrictable"` + FeedbackOrganization *string `access:"site_notifications"` + EnableSMTPAuth *bool `access:"environment_smtp,write_restrictable,cloud_restrictable"` + SMTPUsername *string `access:"environment_smtp,write_restrictable,cloud_restrictable"` // telemetry: none + SMTPPassword *string `access:"environment_smtp,write_restrictable,cloud_restrictable"` // telemetry: none + SMTPServer *string `access:"environment_smtp,write_restrictable,cloud_restrictable"` // telemetry: none + SMTPPort *string `access:"environment_smtp,write_restrictable,cloud_restrictable"` // telemetry: none + SMTPServerTimeout *int `access:"cloud_restrictable"` + ConnectionSecurity *string `access:"environment_smtp,write_restrictable,cloud_restrictable"` + SendPushNotifications *bool 
`access:"environment_push_notification_server"` + PushNotificationServer *string `access:"environment_push_notification_server"` // telemetry: none + PushNotificationContents *string `access:"site_notifications"` + PushNotificationBuffer *int // telemetry: none + EnableEmailBatching *bool `access:"site_notifications"` + EmailBatchingBufferSize *int `access:"experimental_features"` + EmailBatchingInterval *int `access:"experimental_features"` + EnablePreviewModeBanner *bool `access:"site_notifications"` + SkipServerCertificateVerification *bool `access:"environment_smtp,write_restrictable,cloud_restrictable"` + EmailNotificationContentsType *string `access:"site_notifications"` + LoginButtonColor *string `access:"experimental_features"` + LoginButtonBorderColor *string `access:"experimental_features"` + LoginButtonTextColor *string `access:"experimental_features"` +} + +func (s *EmailSettings) SetDefaults(isUpdate bool) { + if s.EnableSignUpWithEmail == nil { + s.EnableSignUpWithEmail = NewPointer(true) + } + + if s.EnableSignInWithEmail == nil { + s.EnableSignInWithEmail = NewPointer(*s.EnableSignUpWithEmail) + } + + if s.EnableSignInWithUsername == nil { + s.EnableSignInWithUsername = NewPointer(true) + } + + if s.SendEmailNotifications == nil { + s.SendEmailNotifications = NewPointer(true) + } + + if s.UseChannelInEmailNotifications == nil { + s.UseChannelInEmailNotifications = NewPointer(false) + } + + if s.RequireEmailVerification == nil { + s.RequireEmailVerification = NewPointer(false) + } + + if s.FeedbackName == nil { + s.FeedbackName = NewPointer("") + } + + if s.FeedbackEmail == nil { + s.FeedbackEmail = NewPointer("test@example.com") + } + + if s.ReplyToAddress == nil { + s.ReplyToAddress = NewPointer("test@example.com") + } + + if s.FeedbackOrganization == nil { + s.FeedbackOrganization = NewPointer(EmailSettingsDefaultFeedbackOrganization) + } + + if s.EnableSMTPAuth == nil { + if s.ConnectionSecurity == nil || *s.ConnectionSecurity == ConnSecurityNone 
{ + s.EnableSMTPAuth = NewPointer(false) + } else { + s.EnableSMTPAuth = NewPointer(true) + } + } + + if s.SMTPUsername == nil { + s.SMTPUsername = NewPointer("") + } + + if s.SMTPPassword == nil { + s.SMTPPassword = NewPointer("") + } + + if s.SMTPServer == nil || *s.SMTPServer == "" { + s.SMTPServer = NewPointer(EmailSMTPDefaultServer) + } + + if s.SMTPPort == nil || *s.SMTPPort == "" { + s.SMTPPort = NewPointer(EmailSMTPDefaultPort) + } + + if s.SMTPServerTimeout == nil || *s.SMTPServerTimeout == 0 { + s.SMTPServerTimeout = NewPointer(10) + } + + if s.ConnectionSecurity == nil || *s.ConnectionSecurity == ConnSecurityPlain { + s.ConnectionSecurity = NewPointer(ConnSecurityNone) + } + + if s.SendPushNotifications == nil { + s.SendPushNotifications = NewPointer(!isUpdate) + } + + if s.PushNotificationServer == nil { + if isUpdate { + s.PushNotificationServer = NewPointer("") + } else { + s.PushNotificationServer = NewPointer(GenericNotificationServer) + } + } + + if s.PushNotificationContents == nil { + s.PushNotificationContents = NewPointer(FullNotification) + } + + if s.PushNotificationBuffer == nil { + s.PushNotificationBuffer = NewPointer(1000) + } + + if s.EnableEmailBatching == nil { + s.EnableEmailBatching = NewPointer(false) + } + + if s.EmailBatchingBufferSize == nil { + s.EmailBatchingBufferSize = NewPointer(EmailBatchingBufferSize) + } + + if s.EmailBatchingInterval == nil { + s.EmailBatchingInterval = NewPointer(EmailBatchingInterval) + } + + if s.EnablePreviewModeBanner == nil { + s.EnablePreviewModeBanner = NewPointer(true) + } + + if s.EnableSMTPAuth == nil { + if *s.ConnectionSecurity == ConnSecurityNone { + s.EnableSMTPAuth = NewPointer(false) + } else { + s.EnableSMTPAuth = NewPointer(true) + } + } + + if *s.ConnectionSecurity == ConnSecurityPlain { + *s.ConnectionSecurity = ConnSecurityNone + } + + if s.SkipServerCertificateVerification == nil { + s.SkipServerCertificateVerification = NewPointer(false) + } + + if s.EmailNotificationContentsType 
== nil { + s.EmailNotificationContentsType = NewPointer(EmailNotificationContentsFull) + } + + if s.LoginButtonColor == nil { + s.LoginButtonColor = NewPointer("#0000") + } + + if s.LoginButtonBorderColor == nil { + s.LoginButtonBorderColor = NewPointer("#2389D7") + } + + if s.LoginButtonTextColor == nil { + s.LoginButtonTextColor = NewPointer("#2389D7") + } +} + +type RateLimitSettings struct { + Enable *bool `access:"environment_rate_limiting,write_restrictable,cloud_restrictable"` + PerSec *int `access:"environment_rate_limiting,write_restrictable,cloud_restrictable"` + MaxBurst *int `access:"environment_rate_limiting,write_restrictable,cloud_restrictable"` + MemoryStoreSize *int `access:"environment_rate_limiting,write_restrictable,cloud_restrictable"` + VaryByRemoteAddr *bool `access:"environment_rate_limiting,write_restrictable,cloud_restrictable"` + VaryByUser *bool `access:"environment_rate_limiting,write_restrictable,cloud_restrictable"` + VaryByHeader string `access:"environment_rate_limiting,write_restrictable,cloud_restrictable"` +} + +func (s *RateLimitSettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.PerSec == nil { + s.PerSec = NewPointer(10) + } + + if s.MaxBurst == nil { + s.MaxBurst = NewPointer(100) + } + + if s.MemoryStoreSize == nil { + s.MemoryStoreSize = NewPointer(10000) + } + + if s.VaryByRemoteAddr == nil { + s.VaryByRemoteAddr = NewPointer(true) + } + + if s.VaryByUser == nil { + s.VaryByUser = NewPointer(false) + } +} + +type PrivacySettings struct { + ShowEmailAddress *bool `access:"site_users_and_teams"` + ShowFullName *bool `access:"site_users_and_teams"` +} + +func (s *PrivacySettings) setDefaults() { + if s.ShowEmailAddress == nil { + s.ShowEmailAddress = NewPointer(true) + } + + if s.ShowFullName == nil { + s.ShowFullName = NewPointer(true) + } +} + +type SupportSettings struct { + TermsOfServiceLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + 
PrivacyPolicyLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + AboutLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + HelpLink *string `access:"site_customization"` + ReportAProblemLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + ReportAProblemType *string `access:"site_customization,write_restrictable,cloud_restrictable"` + ReportAProblemMail *string `access:"site_customization,write_restrictable,cloud_restrictable"` + AllowDownloadLogs *bool `access:"site_customization,write_restrictable,cloud_restrictable"` + ForgotPasswordLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + SupportEmail *string `access:"site_notifications"` + CustomTermsOfServiceEnabled *bool `access:"compliance_custom_terms_of_service"` + CustomTermsOfServiceReAcceptancePeriod *int `access:"compliance_custom_terms_of_service"` + EnableAskCommunityLink *bool `access:"site_customization"` +} + +func (s *SupportSettings) SetDefaults() { + if !isSafeLink(s.TermsOfServiceLink) { + *s.TermsOfServiceLink = SupportSettingsDefaultTermsOfServiceLink + } + + if s.TermsOfServiceLink == nil { + s.TermsOfServiceLink = NewPointer(SupportSettingsDefaultTermsOfServiceLink) + } + + if !isSafeLink(s.PrivacyPolicyLink) { + *s.PrivacyPolicyLink = "" + } + + if s.PrivacyPolicyLink == nil { + s.PrivacyPolicyLink = NewPointer(SupportSettingsDefaultPrivacyPolicyLink) + } + + if !isSafeLink(s.AboutLink) { + *s.AboutLink = "" + } + + if s.AboutLink == nil { + s.AboutLink = NewPointer(SupportSettingsDefaultAboutLink) + } + + if !isSafeLink(s.HelpLink) { + *s.HelpLink = "" + } + + if s.HelpLink == nil { + s.HelpLink = NewPointer(SupportSettingsDefaultHelpLink) + } + + if !isSafeLink(s.ReportAProblemLink) { + *s.ReportAProblemLink = "" + } + + if s.ReportAProblemLink == nil { + s.ReportAProblemLink = NewPointer(SupportSettingsDefaultReportAProblemLink) + } + + if s.ReportAProblemType == nil { + 
s.ReportAProblemType = NewPointer(SupportSettingsDefaultReportAProblemType) + } + + if s.ReportAProblemMail == nil { + s.ReportAProblemMail = NewPointer("") + } + + if s.AllowDownloadLogs == nil { + s.AllowDownloadLogs = NewPointer(true) + } + + if !isSafeLink(s.ForgotPasswordLink) { + *s.ForgotPasswordLink = "" + } + + if s.ForgotPasswordLink == nil { + s.ForgotPasswordLink = NewPointer("") + } + + if s.SupportEmail == nil { + s.SupportEmail = NewPointer(SupportSettingsDefaultSupportEmail) + } + + if s.CustomTermsOfServiceEnabled == nil { + s.CustomTermsOfServiceEnabled = NewPointer(false) + } + + if s.CustomTermsOfServiceReAcceptancePeriod == nil { + s.CustomTermsOfServiceReAcceptancePeriod = NewPointer(SupportSettingsDefaultReAcceptancePeriod) + } + + if s.EnableAskCommunityLink == nil { + s.EnableAskCommunityLink = NewPointer(true) + } +} + +type AnnouncementSettings struct { + EnableBanner *bool `access:"site_announcement_banner"` + BannerText *string `access:"site_announcement_banner"` // telemetry: none + BannerColor *string `access:"site_announcement_banner"` + BannerTextColor *string `access:"site_announcement_banner"` + AllowBannerDismissal *bool `access:"site_announcement_banner"` + AdminNoticesEnabled *bool `access:"site_notices"` + UserNoticesEnabled *bool `access:"site_notices"` + NoticesURL *string `access:"site_notices,write_restrictable"` // telemetry: none + NoticesFetchFrequency *int `access:"site_notices,write_restrictable"` // telemetry: none + NoticesSkipCache *bool `access:"site_notices,write_restrictable"` // telemetry: none +} + +func (s *AnnouncementSettings) SetDefaults() { + if s.EnableBanner == nil { + s.EnableBanner = NewPointer(false) + } + + if s.BannerText == nil { + s.BannerText = NewPointer("") + } + + if s.BannerColor == nil { + s.BannerColor = NewPointer(AnnouncementSettingsDefaultBannerColor) + } + + if s.BannerTextColor == nil { + s.BannerTextColor = NewPointer(AnnouncementSettingsDefaultBannerTextColor) + } + + if 
s.AllowBannerDismissal == nil { + s.AllowBannerDismissal = NewPointer(true) + } + + if s.AdminNoticesEnabled == nil { + s.AdminNoticesEnabled = NewPointer(true) + } + + if s.UserNoticesEnabled == nil { + s.UserNoticesEnabled = NewPointer(true) + } + if s.NoticesURL == nil { + s.NoticesURL = NewPointer(AnnouncementSettingsDefaultNoticesJsonURL) + } + if s.NoticesSkipCache == nil { + s.NoticesSkipCache = NewPointer(false) + } + if s.NoticesFetchFrequency == nil { + s.NoticesFetchFrequency = NewPointer(AnnouncementSettingsDefaultNoticesFetchFrequencySeconds) + } +} + +type ThemeSettings struct { + EnableThemeSelection *bool `access:"experimental_features"` + DefaultTheme *string `access:"experimental_features"` + AllowCustomThemes *bool `access:"experimental_features"` + AllowedThemes []string +} + +func (s *ThemeSettings) SetDefaults() { + if s.EnableThemeSelection == nil { + s.EnableThemeSelection = NewPointer(true) + } + + if s.DefaultTheme == nil { + s.DefaultTheme = NewPointer(TeamSettingsDefaultTeamText) + } + + if s.AllowCustomThemes == nil { + s.AllowCustomThemes = NewPointer(true) + } + + if s.AllowedThemes == nil { + s.AllowedThemes = []string{} + } +} + +type TeamSettings struct { + SiteName *string `access:"site_customization"` + MaxUsersPerTeam *int `access:"site_users_and_teams"` + EnableJoinLeaveMessageByDefault *bool `access:"site_users_and_teams"` + EnableUserCreation *bool `access:"authentication_signup"` + EnableOpenServer *bool `access:"authentication_signup"` + EnableUserDeactivation *bool `access:"experimental_features"` + RestrictCreationToDomains *string `access:"authentication_signup"` // telemetry: none + EnableCustomUserStatuses *bool `access:"site_users_and_teams"` + EnableCustomBrand *bool `access:"site_customization"` + CustomBrandText *string `access:"site_customization"` + CustomDescriptionText *string `access:"site_customization"` + RestrictDirectMessage *string `access:"site_users_and_teams"` + EnableLastActiveTime *bool 
`access:"site_users_and_teams"` + // In seconds. + UserStatusAwayTimeout *int64 `access:"experimental_features"` + MaxChannelsPerTeam *int64 `access:"site_users_and_teams"` + MaxNotificationsPerChannel *int64 `access:"environment_push_notification_server"` + EnableConfirmNotificationsToChannel *bool `access:"site_notifications"` + TeammateNameDisplay *string `access:"site_users_and_teams"` + // Deprecated: This field is no longer in use, and should always be true. + ExperimentalViewArchivedChannels *bool `access:"experimental_features,site_users_and_teams"` + ExperimentalEnableAutomaticReplies *bool `access:"experimental_features"` + LockTeammateNameDisplay *bool `access:"site_users_and_teams"` + ExperimentalPrimaryTeam *string `access:"experimental_features"` + ExperimentalDefaultChannels []string `access:"experimental_features"` +} + +func (s *TeamSettings) SetDefaults() { + if s.SiteName == nil || *s.SiteName == "" { + s.SiteName = NewPointer(TeamSettingsDefaultSiteName) + } + + if s.MaxUsersPerTeam == nil { + s.MaxUsersPerTeam = NewPointer(TeamSettingsDefaultMaxUsersPerTeam) + } + + if s.EnableJoinLeaveMessageByDefault == nil { + s.EnableJoinLeaveMessageByDefault = NewPointer(true) + } + + if s.EnableUserCreation == nil { + s.EnableUserCreation = NewPointer(true) + } + + if s.EnableOpenServer == nil { + s.EnableOpenServer = NewPointer(false) + } + + if s.RestrictCreationToDomains == nil { + s.RestrictCreationToDomains = NewPointer("") + } + + if s.EnableCustomUserStatuses == nil { + s.EnableCustomUserStatuses = NewPointer(true) + } + + if s.EnableLastActiveTime == nil { + s.EnableLastActiveTime = NewPointer(true) + } + + if s.EnableCustomBrand == nil { + s.EnableCustomBrand = NewPointer(false) + } + + if s.EnableUserDeactivation == nil { + s.EnableUserDeactivation = NewPointer(false) + } + + if s.CustomBrandText == nil { + s.CustomBrandText = NewPointer(TeamSettingsDefaultCustomBrandText) + } + + if s.CustomDescriptionText == nil { + s.CustomDescriptionText = 
NewPointer(TeamSettingsDefaultCustomDescriptionText) + } + + if s.RestrictDirectMessage == nil { + s.RestrictDirectMessage = NewPointer(DirectMessageAny) + } + + if s.UserStatusAwayTimeout == nil { + s.UserStatusAwayTimeout = NewPointer(int64(TeamSettingsDefaultUserStatusAwayTimeout)) + } + + if s.MaxChannelsPerTeam == nil { + s.MaxChannelsPerTeam = NewPointer(int64(2000)) + } + + if s.MaxNotificationsPerChannel == nil { + s.MaxNotificationsPerChannel = NewPointer(int64(1000)) + } + + if s.EnableConfirmNotificationsToChannel == nil { + s.EnableConfirmNotificationsToChannel = NewPointer(true) + } + + if s.ExperimentalEnableAutomaticReplies == nil { + s.ExperimentalEnableAutomaticReplies = NewPointer(false) + } + + if s.ExperimentalPrimaryTeam == nil { + s.ExperimentalPrimaryTeam = NewPointer("") + } + + if s.ExperimentalDefaultChannels == nil { + s.ExperimentalDefaultChannels = []string{} + } + + if s.EnableUserCreation == nil { + s.EnableUserCreation = NewPointer(true) + } + + if s.ExperimentalViewArchivedChannels == nil { + s.ExperimentalViewArchivedChannels = NewPointer(true) + } + + if s.LockTeammateNameDisplay == nil { + s.LockTeammateNameDisplay = NewPointer(false) + } +} + +type ClientRequirements struct { + AndroidLatestVersion string `access:"write_restrictable,cloud_restrictable"` + AndroidMinVersion string `access:"write_restrictable,cloud_restrictable"` + IosLatestVersion string `access:"write_restrictable,cloud_restrictable"` + IosMinVersion string `access:"write_restrictable,cloud_restrictable"` +} + +type LdapSettings struct { + // Basic + Enable *bool `access:"authentication_ldap"` + EnableSync *bool `access:"authentication_ldap"` + LdapServer *string `access:"authentication_ldap"` // telemetry: none + LdapPort *int `access:"authentication_ldap"` // telemetry: none + ConnectionSecurity *string `access:"authentication_ldap"` + BaseDN *string `access:"authentication_ldap"` // telemetry: none + BindUsername *string `access:"authentication_ldap"` // 
telemetry: none + BindPassword *string `access:"authentication_ldap"` // telemetry: none + MaximumLoginAttempts *int `access:"authentication_ldap"` // telemetry: none + + // Filtering + UserFilter *string `access:"authentication_ldap"` // telemetry: none + GroupFilter *string `access:"authentication_ldap"` + GuestFilter *string `access:"authentication_ldap"` + EnableAdminFilter *bool + AdminFilter *string + + // Group Mapping + GroupDisplayNameAttribute *string `access:"authentication_ldap"` + GroupIdAttribute *string `access:"authentication_ldap"` + + // User Mapping + FirstNameAttribute *string `access:"authentication_ldap"` + LastNameAttribute *string `access:"authentication_ldap"` + EmailAttribute *string `access:"authentication_ldap"` + UsernameAttribute *string `access:"authentication_ldap"` + NicknameAttribute *string `access:"authentication_ldap"` + IdAttribute *string `access:"authentication_ldap"` + PositionAttribute *string `access:"authentication_ldap"` + LoginIdAttribute *string `access:"authentication_ldap"` + PictureAttribute *string `access:"authentication_ldap"` + + // Synchronization + SyncIntervalMinutes *int `access:"authentication_ldap"` + ReAddRemovedMembers *bool `access:"authentication_ldap"` + + // Advanced + SkipCertificateVerification *bool `access:"authentication_ldap"` + PublicCertificateFile *string `access:"authentication_ldap"` + PrivateKeyFile *string `access:"authentication_ldap"` + QueryTimeout *int `access:"authentication_ldap"` + MaxPageSize *int `access:"authentication_ldap"` + + // Customization + LoginFieldName *string `access:"authentication_ldap"` + + LoginButtonColor *string `access:"experimental_features"` + LoginButtonBorderColor *string `access:"experimental_features"` + LoginButtonTextColor *string `access:"experimental_features"` +} + +func (s *LdapSettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + // When unset should default to LDAP Enabled + if s.EnableSync == nil { + 
s.EnableSync = NewPointer(*s.Enable) + } + + if s.EnableAdminFilter == nil { + s.EnableAdminFilter = NewPointer(false) + } + + if s.LdapServer == nil { + s.LdapServer = NewPointer("") + } + + if s.LdapPort == nil { + s.LdapPort = NewPointer(389) + } + + if s.ConnectionSecurity == nil { + s.ConnectionSecurity = NewPointer("") + } + + if s.PublicCertificateFile == nil { + s.PublicCertificateFile = NewPointer("") + } + + if s.PrivateKeyFile == nil { + s.PrivateKeyFile = NewPointer("") + } + + if s.BaseDN == nil { + s.BaseDN = NewPointer("") + } + + if s.BindUsername == nil { + s.BindUsername = NewPointer("") + } + + if s.BindPassword == nil { + s.BindPassword = NewPointer("") + } + + if s.MaximumLoginAttempts == nil { + s.MaximumLoginAttempts = NewPointer(LdapSettingsDefaultMaximumLoginAttempts) + } + + if s.UserFilter == nil { + s.UserFilter = NewPointer("") + } + + if s.GuestFilter == nil { + s.GuestFilter = NewPointer("") + } + + if s.AdminFilter == nil { + s.AdminFilter = NewPointer("") + } + + if s.GroupFilter == nil { + s.GroupFilter = NewPointer("") + } + + if s.GroupDisplayNameAttribute == nil { + s.GroupDisplayNameAttribute = NewPointer(LdapSettingsDefaultGroupDisplayNameAttribute) + } + + if s.GroupIdAttribute == nil { + s.GroupIdAttribute = NewPointer(LdapSettingsDefaultGroupIdAttribute) + } + + if s.FirstNameAttribute == nil { + s.FirstNameAttribute = NewPointer(LdapSettingsDefaultFirstNameAttribute) + } + + if s.LastNameAttribute == nil { + s.LastNameAttribute = NewPointer(LdapSettingsDefaultLastNameAttribute) + } + + if s.EmailAttribute == nil { + s.EmailAttribute = NewPointer(LdapSettingsDefaultEmailAttribute) + } + + if s.UsernameAttribute == nil { + s.UsernameAttribute = NewPointer(LdapSettingsDefaultUsernameAttribute) + } + + if s.NicknameAttribute == nil { + s.NicknameAttribute = NewPointer(LdapSettingsDefaultNicknameAttribute) + } + + if s.IdAttribute == nil { + s.IdAttribute = NewPointer(LdapSettingsDefaultIdAttribute) + } + + if 
s.PositionAttribute == nil { + s.PositionAttribute = NewPointer(LdapSettingsDefaultPositionAttribute) + } + + if s.PictureAttribute == nil { + s.PictureAttribute = NewPointer(LdapSettingsDefaultPictureAttribute) + } + + // For those upgrading to the version when LoginIdAttribute was added + // they need IdAttribute == LoginIdAttribute not to break + if s.LoginIdAttribute == nil { + s.LoginIdAttribute = s.IdAttribute + } + + if s.SyncIntervalMinutes == nil { + s.SyncIntervalMinutes = NewPointer(60) + } + + if s.ReAddRemovedMembers == nil { + s.ReAddRemovedMembers = NewPointer(false) + } + + if s.SkipCertificateVerification == nil { + s.SkipCertificateVerification = NewPointer(false) + } + + if s.QueryTimeout == nil { + s.QueryTimeout = NewPointer(60) + } + + if s.MaxPageSize == nil { + s.MaxPageSize = NewPointer(0) + } + + if s.LoginFieldName == nil { + s.LoginFieldName = NewPointer(LdapSettingsDefaultLoginFieldName) + } + + if s.LoginButtonColor == nil { + s.LoginButtonColor = NewPointer("#0000") + } + + if s.LoginButtonBorderColor == nil { + s.LoginButtonBorderColor = NewPointer("#2389D7") + } + + if s.LoginButtonTextColor == nil { + s.LoginButtonTextColor = NewPointer("#2389D7") + } +} + +type ComplianceSettings struct { + Enable *bool `access:"compliance_compliance_monitoring"` + Directory *string `access:"compliance_compliance_monitoring"` // telemetry: none + EnableDaily *bool `access:"compliance_compliance_monitoring"` + BatchSize *int `access:"compliance_compliance_monitoring"` // telemetry: none +} + +func (s *ComplianceSettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.Directory == nil { + s.Directory = NewPointer("./data/") + } + + if s.EnableDaily == nil { + s.EnableDaily = NewPointer(false) + } + + if s.BatchSize == nil { + s.BatchSize = NewPointer(30000) + } +} + +type LocalizationSettings struct { + DefaultServerLocale *string `access:"site_localization"` + DefaultClientLocale *string 
`access:"site_localization"` + AvailableLocales *string `access:"site_localization"` + EnableExperimentalLocales *bool `access:"site_localization"` +} + +func (s *LocalizationSettings) SetDefaults() { + if s.DefaultServerLocale == nil { + s.DefaultServerLocale = NewPointer(DefaultLocale) + } + + if s.DefaultClientLocale == nil { + s.DefaultClientLocale = NewPointer(DefaultLocale) + } + + if s.AvailableLocales == nil { + s.AvailableLocales = NewPointer("") + } + + if s.EnableExperimentalLocales == nil { + s.EnableExperimentalLocales = NewPointer(false) + } +} + +type AutoTranslationSettings struct { + Enable *bool `access:"site_localization,cloud_restrictable"` + Provider *string `access:"site_localization,cloud_restrictable"` + TimeoutsMs *AutoTranslationTimeoutsInMs `access:"site_localization,cloud_restrictable"` + LibreTranslate *LibreTranslateProviderSettings `access:"site_localization,cloud_restrictable"` + // TODO: Enable Agents provider in future release + // Agents *AgentsProviderSettings `access:"site_localization,cloud_restrictable"` +} + +type AutoTranslationTimeoutsInMs struct { + NewPost *int `access:"site_localization,cloud_restrictable"` + Fetch *int `access:"site_localization,cloud_restrictable"` + Notification *int `access:"site_localization,cloud_restrictable"` +} + +type LibreTranslateProviderSettings struct { + URL *string `access:"site_localization,cloud_restrictable"` + APIKey *string `access:"site_localization,cloud_restrictable"` +} + +// TODO: Enable Agents provider in future release +// type AgentsProviderSettings struct { +// BotUserId *string `access:"site_localization,cloud_restrictable"` +// } + +func (s *AutoTranslationSettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.Provider == nil { + s.Provider = NewPointer("") + } + + if s.TimeoutsMs == nil { + s.TimeoutsMs = &AutoTranslationTimeoutsInMs{} + } + s.TimeoutsMs.SetDefaults() + + if s.LibreTranslate == nil { + s.LibreTranslate = 
&LibreTranslateProviderSettings{} + } + s.LibreTranslate.SetDefaults() + + // TODO: Enable Agents provider in future release + // if s.Agents == nil { + // s.Agents = &AgentsProviderSettings{} + // } + // s.Agents.SetDefaults() +} + +func (s *AutoTranslationTimeoutsInMs) SetDefaults() { + if s.NewPost == nil { + s.NewPost = NewPointer(800) + } + + if s.Fetch == nil { + s.Fetch = NewPointer(2000) + } + + if s.Notification == nil { + s.Notification = NewPointer(300) + } +} + +func (s *LibreTranslateProviderSettings) SetDefaults() { + if s.URL == nil { + s.URL = NewPointer("") + } + + if s.APIKey == nil { + s.APIKey = NewPointer("") + } +} + +// TODO: Enable Agents provider in future release +// func (s *AgentsProviderSettings) SetDefaults() { +// if s.BotUserId == nil { +// s.BotUserId = NewPointer("") +// } +// } + +type SamlSettings struct { + // Basic + Enable *bool `access:"authentication_saml"` + EnableSyncWithLdap *bool `access:"authentication_saml"` + EnableSyncWithLdapIncludeAuth *bool `access:"authentication_saml"` + IgnoreGuestsLdapSync *bool `access:"authentication_saml"` + + Verify *bool `access:"authentication_saml"` + Encrypt *bool `access:"authentication_saml"` + SignRequest *bool `access:"authentication_saml"` + + IdpURL *string `access:"authentication_saml"` // telemetry: none + IdpDescriptorURL *string `access:"authentication_saml"` // telemetry: none + IdpMetadataURL *string `access:"authentication_saml"` // telemetry: none + ServiceProviderIdentifier *string `access:"authentication_saml"` // telemetry: none + AssertionConsumerServiceURL *string `access:"authentication_saml"` // telemetry: none + + SignatureAlgorithm *string `access:"authentication_saml"` + CanonicalAlgorithm *string `access:"authentication_saml"` + + ScopingIDPProviderId *string `access:"authentication_saml"` + ScopingIDPName *string `access:"authentication_saml"` + + IdpCertificateFile *string `access:"authentication_saml"` // telemetry: none + PublicCertificateFile *string 
`access:"authentication_saml"` // telemetry: none + PrivateKeyFile *string `access:"authentication_saml"` // telemetry: none + + // User Mapping + IdAttribute *string `access:"authentication_saml"` + GuestAttribute *string `access:"authentication_saml"` + EnableAdminAttribute *bool + AdminAttribute *string + FirstNameAttribute *string `access:"authentication_saml"` + LastNameAttribute *string `access:"authentication_saml"` + EmailAttribute *string `access:"authentication_saml"` + UsernameAttribute *string `access:"authentication_saml"` + NicknameAttribute *string `access:"authentication_saml"` + LocaleAttribute *string `access:"authentication_saml"` + PositionAttribute *string `access:"authentication_saml"` + + LoginButtonText *string `access:"authentication_saml"` + + LoginButtonColor *string `access:"experimental_features"` + LoginButtonBorderColor *string `access:"experimental_features"` + LoginButtonTextColor *string `access:"experimental_features"` +} + +func (s *SamlSettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.EnableSyncWithLdap == nil { + s.EnableSyncWithLdap = NewPointer(false) + } + + if s.EnableSyncWithLdapIncludeAuth == nil { + s.EnableSyncWithLdapIncludeAuth = NewPointer(false) + } + + if s.IgnoreGuestsLdapSync == nil { + s.IgnoreGuestsLdapSync = NewPointer(false) + } + + if s.EnableAdminAttribute == nil { + s.EnableAdminAttribute = NewPointer(false) + } + + if s.Verify == nil { + s.Verify = NewPointer(true) + } + + if s.Encrypt == nil { + s.Encrypt = NewPointer(true) + } + + if s.SignRequest == nil { + s.SignRequest = NewPointer(false) + } + + if s.SignatureAlgorithm == nil { + s.SignatureAlgorithm = NewPointer(SamlSettingsDefaultSignatureAlgorithm) + } + + if s.CanonicalAlgorithm == nil { + s.CanonicalAlgorithm = NewPointer(SamlSettingsDefaultCanonicalAlgorithm) + } + + if s.IdpURL == nil { + s.IdpURL = NewPointer("") + } + + if s.IdpDescriptorURL == nil { + s.IdpDescriptorURL = NewPointer("") + } + + if 
s.ServiceProviderIdentifier == nil { + if s.IdpDescriptorURL != nil { + s.ServiceProviderIdentifier = NewPointer(*s.IdpDescriptorURL) + } else { + s.ServiceProviderIdentifier = NewPointer("") + } + } + + if s.IdpMetadataURL == nil { + s.IdpMetadataURL = NewPointer("") + } + + if s.IdpCertificateFile == nil { + s.IdpCertificateFile = NewPointer("") + } + + if s.PublicCertificateFile == nil { + s.PublicCertificateFile = NewPointer("") + } + + if s.PrivateKeyFile == nil { + s.PrivateKeyFile = NewPointer("") + } + + if s.AssertionConsumerServiceURL == nil { + s.AssertionConsumerServiceURL = NewPointer("") + } + + if s.ScopingIDPProviderId == nil { + s.ScopingIDPProviderId = NewPointer("") + } + + if s.ScopingIDPName == nil { + s.ScopingIDPName = NewPointer("") + } + + if s.LoginButtonText == nil || *s.LoginButtonText == "" { + s.LoginButtonText = NewPointer(UserAuthServiceSamlText) + } + + if s.IdAttribute == nil { + s.IdAttribute = NewPointer(SamlSettingsDefaultIdAttribute) + } + + if s.GuestAttribute == nil { + s.GuestAttribute = NewPointer(SamlSettingsDefaultGuestAttribute) + } + if s.AdminAttribute == nil { + s.AdminAttribute = NewPointer(SamlSettingsDefaultAdminAttribute) + } + if s.FirstNameAttribute == nil { + s.FirstNameAttribute = NewPointer(SamlSettingsDefaultFirstNameAttribute) + } + + if s.LastNameAttribute == nil { + s.LastNameAttribute = NewPointer(SamlSettingsDefaultLastNameAttribute) + } + + if s.EmailAttribute == nil { + s.EmailAttribute = NewPointer(SamlSettingsDefaultEmailAttribute) + } + + if s.UsernameAttribute == nil { + s.UsernameAttribute = NewPointer(SamlSettingsDefaultUsernameAttribute) + } + + if s.NicknameAttribute == nil { + s.NicknameAttribute = NewPointer(SamlSettingsDefaultNicknameAttribute) + } + + if s.PositionAttribute == nil { + s.PositionAttribute = NewPointer(SamlSettingsDefaultPositionAttribute) + } + + if s.LocaleAttribute == nil { + s.LocaleAttribute = NewPointer(SamlSettingsDefaultLocaleAttribute) + } + + if s.LoginButtonColor 
== nil { + s.LoginButtonColor = NewPointer("#34a28b") + } + + if s.LoginButtonBorderColor == nil { + s.LoginButtonBorderColor = NewPointer("#2389D7") + } + + if s.LoginButtonTextColor == nil { + s.LoginButtonTextColor = NewPointer("#ffffff") + } +} + +type NativeAppSettings struct { + AppCustomURLSchemes []string `access:"site_customization,write_restrictable,cloud_restrictable"` // telemetry: none + AppDownloadLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + AndroidAppDownloadLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + IosAppDownloadLink *string `access:"site_customization,write_restrictable,cloud_restrictable"` + MobileExternalBrowser *bool `access:"site_customization,write_restrictable,cloud_restrictable"` + MobileEnableBiometrics *bool `access:"site_customization,write_restrictable"` + MobilePreventScreenCapture *bool `access:"site_customization,write_restrictable"` + MobileJailbreakProtection *bool `access:"site_customization,write_restrictable"` + MobileEnableSecureFilePreview *bool `access:"site_customization,write_restrictable"` + MobileAllowPdfLinkNavigation *bool `access:"site_customization,write_restrictable"` +} + +func (s *NativeAppSettings) SetDefaults() { + if s.AppDownloadLink == nil { + s.AppDownloadLink = NewPointer(NativeappSettingsDefaultAppDownloadLink) + } + + if s.AndroidAppDownloadLink == nil { + s.AndroidAppDownloadLink = NewPointer(NativeappSettingsDefaultAndroidAppDownloadLink) + } + + if s.IosAppDownloadLink == nil { + s.IosAppDownloadLink = NewPointer(NativeappSettingsDefaultIosAppDownloadLink) + } + + if s.AppCustomURLSchemes == nil { + s.AppCustomURLSchemes = GetDefaultAppCustomURLSchemes() + } + + if s.MobileExternalBrowser == nil { + s.MobileExternalBrowser = NewPointer(false) + } + + if s.MobileEnableBiometrics == nil { + s.MobileEnableBiometrics = NewPointer(false) + } + + if s.MobilePreventScreenCapture == nil { + s.MobilePreventScreenCapture = 
NewPointer(false) + } + + if s.MobileJailbreakProtection == nil { + s.MobileJailbreakProtection = NewPointer(false) + } + + if s.MobileEnableSecureFilePreview == nil { + s.MobileEnableSecureFilePreview = NewPointer(false) + } + + if s.MobileAllowPdfLinkNavigation == nil { + s.MobileAllowPdfLinkNavigation = NewPointer(false) + } +} + +type ElasticsearchSettings struct { + ConnectionURL *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + Backend *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + Username *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + Password *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + EnableIndexing *bool `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + EnableSearching *bool `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + EnableAutocomplete *bool `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + Sniff *bool `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + PostIndexReplicas *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + PostIndexShards *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + ChannelIndexReplicas *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + ChannelIndexShards *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + UserIndexReplicas *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + UserIndexShards *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + AggregatePostsAfterDays *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` // telemetry: none + PostsAggregatorJobStartTime *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` // telemetry: none + 
IndexPrefix *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + GlobalSearchPrefix *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + LiveIndexingBatchSize *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + BulkIndexingTimeWindowSeconds *int `json:",omitempty"` // telemetry: none + BatchSize *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + RequestTimeoutSeconds *int `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + SkipTLSVerification *bool `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + CA *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + ClientCert *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + ClientKey *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + Trace *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` + IgnoredPurgeIndexes *string `access:"environment_elasticsearch,write_restrictable,cloud_restrictable"` // telemetry: none +} + +func (s *ElasticsearchSettings) SetDefaults() { + if s.ConnectionURL == nil { + s.ConnectionURL = NewPointer(ElasticsearchSettingsDefaultConnectionURL) + } + + if s.Backend == nil { + s.Backend = NewPointer(ElasticsearchSettingsESBackend) + } + + if s.Username == nil { + s.Username = NewPointer(ElasticsearchSettingsDefaultUsername) + } + + if s.Password == nil { + s.Password = NewPointer(ElasticsearchSettingsDefaultPassword) + } + + if s.CA == nil { + s.CA = NewPointer("") + } + + if s.ClientCert == nil { + s.ClientCert = NewPointer("") + } + + if s.ClientKey == nil { + s.ClientKey = NewPointer("") + } + + if s.EnableIndexing == nil { + s.EnableIndexing = NewPointer(false) + } + + if s.EnableSearching == nil { + s.EnableSearching = NewPointer(false) + } + + if s.EnableAutocomplete == nil { + 
s.EnableAutocomplete = NewPointer(false) + } + + if s.Sniff == nil { + s.Sniff = NewPointer(true) + } + + if s.PostIndexReplicas == nil { + s.PostIndexReplicas = NewPointer(ElasticsearchSettingsDefaultPostIndexReplicas) + } + + if s.PostIndexShards == nil { + s.PostIndexShards = NewPointer(ElasticsearchSettingsDefaultPostIndexShards) + } + + if s.ChannelIndexReplicas == nil { + s.ChannelIndexReplicas = NewPointer(ElasticsearchSettingsDefaultChannelIndexReplicas) + } + + if s.ChannelIndexShards == nil { + s.ChannelIndexShards = NewPointer(ElasticsearchSettingsDefaultChannelIndexShards) + } + + if s.UserIndexReplicas == nil { + s.UserIndexReplicas = NewPointer(ElasticsearchSettingsDefaultUserIndexReplicas) + } + + if s.UserIndexShards == nil { + s.UserIndexShards = NewPointer(ElasticsearchSettingsDefaultUserIndexShards) + } + + if s.AggregatePostsAfterDays == nil { + s.AggregatePostsAfterDays = NewPointer(ElasticsearchSettingsDefaultAggregatePostsAfterDays) + } + + if s.PostsAggregatorJobStartTime == nil { + s.PostsAggregatorJobStartTime = NewPointer(ElasticsearchSettingsDefaultPostsAggregatorJobStartTime) + } + + if s.IndexPrefix == nil { + s.IndexPrefix = NewPointer(ElasticsearchSettingsDefaultIndexPrefix) + } + + if s.GlobalSearchPrefix == nil { + s.GlobalSearchPrefix = NewPointer("") + } + + if s.LiveIndexingBatchSize == nil { + s.LiveIndexingBatchSize = NewPointer(ElasticsearchSettingsDefaultLiveIndexingBatchSize) + } + + if s.BatchSize == nil { + s.BatchSize = NewPointer(ElasticsearchSettingsDefaultBatchSize) + } + + if s.RequestTimeoutSeconds == nil { + s.RequestTimeoutSeconds = NewPointer(ElasticsearchSettingsDefaultRequestTimeoutSeconds) + } + + if s.SkipTLSVerification == nil { + s.SkipTLSVerification = NewPointer(false) + } + + if s.Trace == nil { + s.Trace = NewPointer("") + } + + if s.IgnoredPurgeIndexes == nil { + s.IgnoredPurgeIndexes = NewPointer("") + } +} + +type DataRetentionSettings struct { + EnableMessageDeletion *bool 
`access:"compliance_data_retention_policy"` + EnableFileDeletion *bool `access:"compliance_data_retention_policy"` + EnableBoardsDeletion *bool `access:"compliance_data_retention_policy"` + MessageRetentionDays *int `access:"compliance_data_retention_policy"` // Deprecated: use `MessageRetentionHours` + MessageRetentionHours *int `access:"compliance_data_retention_policy"` + FileRetentionDays *int `access:"compliance_data_retention_policy"` // Deprecated: use `FileRetentionHours` + FileRetentionHours *int `access:"compliance_data_retention_policy"` + BoardsRetentionDays *int `access:"compliance_data_retention_policy"` + DeletionJobStartTime *string `access:"compliance_data_retention_policy"` + BatchSize *int `access:"compliance_data_retention_policy"` + TimeBetweenBatchesMilliseconds *int `access:"compliance_data_retention_policy"` + RetentionIdsBatchSize *int `access:"compliance_data_retention_policy"` + PreservePinnedPosts *bool `access:"compliance_data_retention_policy"` +} + +func (s *DataRetentionSettings) SetDefaults() { + if s.EnableMessageDeletion == nil { + s.EnableMessageDeletion = NewPointer(false) + } + + if s.EnableFileDeletion == nil { + s.EnableFileDeletion = NewPointer(false) + } + + if s.EnableBoardsDeletion == nil { + s.EnableBoardsDeletion = NewPointer(false) + } + + if s.MessageRetentionDays == nil { + s.MessageRetentionDays = NewPointer(DataRetentionSettingsDefaultMessageRetentionDays) + } + + if s.MessageRetentionHours == nil { + s.MessageRetentionHours = NewPointer(DataRetentionSettingsDefaultMessageRetentionHours) + } + + if s.FileRetentionDays == nil { + s.FileRetentionDays = NewPointer(DataRetentionSettingsDefaultFileRetentionDays) + } + + if s.FileRetentionHours == nil { + s.FileRetentionHours = NewPointer(DataRetentionSettingsDefaultFileRetentionHours) + } + + if s.BoardsRetentionDays == nil { + s.BoardsRetentionDays = NewPointer(DataRetentionSettingsDefaultBoardsRetentionDays) + } + + if s.DeletionJobStartTime == nil { + 
s.DeletionJobStartTime = NewPointer(DataRetentionSettingsDefaultDeletionJobStartTime) + } + + if s.BatchSize == nil { + s.BatchSize = NewPointer(DataRetentionSettingsDefaultBatchSize) + } + + if s.TimeBetweenBatchesMilliseconds == nil { + s.TimeBetweenBatchesMilliseconds = NewPointer(DataRetentionSettingsDefaultTimeBetweenBatchesMilliseconds) + } + if s.RetentionIdsBatchSize == nil { + s.RetentionIdsBatchSize = NewPointer(DataRetentionSettingsDefaultRetentionIdsBatchSize) + } + + if s.PreservePinnedPosts == nil { + s.PreservePinnedPosts = NewPointer(false) + } +} + +// GetMessageRetentionHours returns the message retention time as an int. +// MessageRetentionHours takes precedence over the deprecated MessageRetentionDays. +func (s *DataRetentionSettings) GetMessageRetentionHours() int { + if s.MessageRetentionHours != nil && *s.MessageRetentionHours > 0 { + return *s.MessageRetentionHours + } + if s.MessageRetentionDays != nil && *s.MessageRetentionDays > 0 { + return *s.MessageRetentionDays * 24 + } + return DataRetentionSettingsDefaultMessageRetentionDays * 24 +} + +// GetFileRetentionHours returns the file retention time as an int. +// FileRetentionHours takes precedence over the deprecated FileRetentionDays. 
+func (s *DataRetentionSettings) GetFileRetentionHours() int { + if s.FileRetentionHours != nil && *s.FileRetentionHours > 0 { + return *s.FileRetentionHours + } + if s.FileRetentionDays != nil && *s.FileRetentionDays > 0 { + return *s.FileRetentionDays * 24 + } + return DataRetentionSettingsDefaultFileRetentionDays * 24 +} + +type JobSettings struct { + RunJobs *bool `access:"write_restrictable,cloud_restrictable"` // telemetry: none + RunScheduler *bool `access:"write_restrictable,cloud_restrictable"` // telemetry: none + CleanupJobsThresholdDays *int `access:"write_restrictable,cloud_restrictable"` + CleanupConfigThresholdDays *int `access:"write_restrictable,cloud_restrictable"` +} + +func (s *JobSettings) SetDefaults() { + if s.RunJobs == nil { + s.RunJobs = NewPointer(true) + } + + if s.RunScheduler == nil { + s.RunScheduler = NewPointer(true) + } + + if s.CleanupJobsThresholdDays == nil { + s.CleanupJobsThresholdDays = NewPointer(-1) + } + + if s.CleanupConfigThresholdDays == nil { + s.CleanupConfigThresholdDays = NewPointer(-1) + } +} + +type CloudSettings struct { + CWSURL *string `access:"write_restrictable"` + CWSAPIURL *string `access:"write_restrictable"` + CWSMock *bool `access:"write_restrictable"` + Disable *bool `access:"write_restrictable,cloud_restrictable"` + PreviewModalBucketURL *string `access:"write_restrictable"` +} + +func (s *CloudSettings) SetDefaults() { + serviceEnvironment := GetServiceEnvironment() + if s.CWSURL == nil || serviceEnvironment == ServiceEnvironmentProduction { + switch serviceEnvironment { + case ServiceEnvironmentProduction: + s.CWSURL = NewPointer(CloudSettingsDefaultCwsURL) + case ServiceEnvironmentTest, ServiceEnvironmentDev: + s.CWSURL = NewPointer(CloudSettingsDefaultCwsURLTest) + } + } + + if s.CWSAPIURL == nil { + switch serviceEnvironment { + case ServiceEnvironmentProduction: + s.CWSAPIURL = NewPointer(CloudSettingsDefaultCwsAPIURL) + case ServiceEnvironmentTest, ServiceEnvironmentDev: + s.CWSAPIURL = 
NewPointer(CloudSettingsDefaultCwsAPIURLTest) + } + } + if s.CWSMock == nil { + isMockCws := MockCWS == "true" + s.CWSMock = &isMockCws + } + + if s.Disable == nil { + s.Disable = NewPointer(false) + } + + if s.PreviewModalBucketURL == nil { + s.PreviewModalBucketURL = NewPointer("") + } +} + +type PluginState struct { + Enable bool +} + +type PluginSettings struct { + Enable *bool `access:"plugins,write_restrictable"` + EnableUploads *bool `access:"plugins,write_restrictable,cloud_restrictable"` + AllowInsecureDownloadURL *bool `access:"plugins,write_restrictable,cloud_restrictable"` + EnableHealthCheck *bool `access:"plugins,write_restrictable,cloud_restrictable"` + Directory *string `access:"plugins,write_restrictable,cloud_restrictable"` // telemetry: none + ClientDirectory *string `access:"plugins,write_restrictable,cloud_restrictable"` // telemetry: none + Plugins map[string]map[string]any `access:"plugins"` // telemetry: none + PluginStates map[string]*PluginState `access:"plugins"` // telemetry: none + EnableMarketplace *bool `access:"plugins,write_restrictable,cloud_restrictable"` + EnableRemoteMarketplace *bool `access:"plugins,write_restrictable,cloud_restrictable"` + AutomaticPrepackagedPlugins *bool `access:"plugins,write_restrictable,cloud_restrictable"` + RequirePluginSignature *bool `access:"plugins,write_restrictable,cloud_restrictable"` + MarketplaceURL *string `access:"plugins,write_restrictable,cloud_restrictable"` + SignaturePublicKeyFiles []string `access:"plugins,write_restrictable,cloud_restrictable"` + ChimeraOAuthProxyURL *string `access:"plugins,write_restrictable,cloud_restrictable"` +} + +func (s *PluginSettings) SetDefaults(ls LogSettings) { + if s.Enable == nil { + s.Enable = NewPointer(true) + } + + if s.EnableUploads == nil { + s.EnableUploads = NewPointer(false) + } + + if s.AllowInsecureDownloadURL == nil { + s.AllowInsecureDownloadURL = NewPointer(false) + } + + if s.EnableHealthCheck == nil { + s.EnableHealthCheck = 
NewPointer(true) + } + + if s.Directory == nil || *s.Directory == "" { + s.Directory = NewPointer(PluginSettingsDefaultDirectory) + } + + if s.ClientDirectory == nil || *s.ClientDirectory == "" { + s.ClientDirectory = NewPointer(PluginSettingsDefaultClientDirectory) + } + + if s.Plugins == nil { + s.Plugins = make(map[string]map[string]any) + } + + if s.PluginStates == nil { + s.PluginStates = make(map[string]*PluginState) + } + + if s.PluginStates[PluginIdNPS] == nil { + // Enable the NPS plugin by default if diagnostics are enabled + s.PluginStates[PluginIdNPS] = &PluginState{Enable: ls.EnableDiagnostics == nil || *ls.EnableDiagnostics} + } + + if s.PluginStates[PluginIdCalls] == nil { + // Enable the calls plugin by default + s.PluginStates[PluginIdCalls] = &PluginState{Enable: true} + } + + if s.PluginStates[PluginIdPlaybooks] == nil { + // Enable the playbooks plugin by default + s.PluginStates[PluginIdPlaybooks] = &PluginState{Enable: true} + } + + if s.PluginStates[PluginIdAI] == nil { + // Enable the AI plugin by default + s.PluginStates[PluginIdAI] = &PluginState{Enable: true} + } + + if s.EnableMarketplace == nil { + s.EnableMarketplace = NewPointer(PluginSettingsDefaultEnableMarketplace) + } + + if s.EnableRemoteMarketplace == nil { + s.EnableRemoteMarketplace = NewPointer(true) + } + + if s.AutomaticPrepackagedPlugins == nil { + s.AutomaticPrepackagedPlugins = NewPointer(true) + } + + if s.MarketplaceURL == nil || *s.MarketplaceURL == "" || *s.MarketplaceURL == PluginSettingsOldMarketplaceURL { + s.MarketplaceURL = NewPointer(PluginSettingsDefaultMarketplaceURL) + } + + if s.RequirePluginSignature == nil { + s.RequirePluginSignature = NewPointer(false) + } + + if s.SignaturePublicKeyFiles == nil { + s.SignaturePublicKeyFiles = []string{} + } + + if s.ChimeraOAuthProxyURL == nil { + s.ChimeraOAuthProxyURL = NewPointer("") + } +} + +// Sanitize cleans up the plugin settings by removing any sensitive information. 
+// It does so by checking if the setting is marked as secret in the plugin manifest. +// If it is, the setting is replaced with a fake value. +// If a plugin is no longer installed, no stored settings for that plugin are returned. +// If the list of manifests is nil, i.e. plugins are disabled, all settings are sanitized. +func (s *PluginSettings) Sanitize(pluginManifests []*Manifest) { + manifestMap := make(map[string]*Manifest, len(pluginManifests)) + + for _, manifest := range pluginManifests { + manifestMap[manifest.Id] = manifest + } + + for id, settings := range s.Plugins { + manifest := manifestMap[id] + + for key := range settings { + if manifest == nil { + // Don't return plugin settings for plugins that are not installed + delete(s.Plugins, id) + break + } + if manifest.SettingsSchema == nil { + // If the plugin doesn't define any settings, none of them can be secrets. + break + } + + for _, definedSetting := range manifest.SettingsSchema.Settings { + if definedSetting.Secret && strings.EqualFold(definedSetting.Key, key) { + settings[key] = FakeSetting + break + } + } + } + } +} + +type WranglerSettings struct { + PermittedWranglerRoles []string + AllowedEmailDomain []string + MoveThreadMaxCount *int64 + MoveThreadToAnotherTeamEnable *bool + MoveThreadFromPrivateChannelEnable *bool + MoveThreadFromDirectMessageChannelEnable *bool + MoveThreadFromGroupMessageChannelEnable *bool +} + +func (w *WranglerSettings) SetDefaults() { + if w.PermittedWranglerRoles == nil { + w.PermittedWranglerRoles = make([]string, 0) + } + if w.AllowedEmailDomain == nil { + w.AllowedEmailDomain = make([]string, 0) + } + if w.MoveThreadMaxCount == nil { + w.MoveThreadMaxCount = NewPointer(int64(100)) + } + if w.MoveThreadToAnotherTeamEnable == nil { + w.MoveThreadToAnotherTeamEnable = NewPointer(false) + } + if w.MoveThreadFromPrivateChannelEnable == nil { + w.MoveThreadFromPrivateChannelEnable = NewPointer(false) + } + if w.MoveThreadFromDirectMessageChannelEnable == nil { + 
w.MoveThreadFromDirectMessageChannelEnable = NewPointer(false) + } + if w.MoveThreadFromGroupMessageChannelEnable == nil { + w.MoveThreadFromGroupMessageChannelEnable = NewPointer(false) + } +} + +func (w *WranglerSettings) IsValid() *AppError { + validDomainRegex := regexp.MustCompile(`^(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|([a-zA-Z0-9][a-zA-Z0-9-_]{1,61}[a-zA-Z0-9]))\.([a-zA-Z]{2,6}|[a-zA-Z0-9-]{2,30}\.[a-zA-Z]{2,3})$`) + for _, domain := range w.AllowedEmailDomain { + if !validDomainRegex.MatchString(domain) && domain != "localhost" { + return NewAppError("Config.IsValid", "model.config.is_valid.move_thread.domain_invalid.app_error", nil, "", http.StatusBadRequest) + } + } + + return nil +} + +type ConnectedWorkspacesSettings struct { + EnableSharedChannels *bool + EnableRemoteClusterService *bool + DisableSharedChannelsStatusSync *bool + SyncUsersOnConnectionOpen *bool + GlobalUserSyncBatchSize *int + MaxPostsPerSync *int + MemberSyncBatchSize *int // Maximum number of members to process in a single batch during shared channel synchronization +} + +func (c *ConnectedWorkspacesSettings) SetDefaults(isUpdate bool, e ExperimentalSettings) { + if c.EnableSharedChannels == nil { + if isUpdate && e.EnableSharedChannels != nil { + c.EnableSharedChannels = e.EnableSharedChannels + } else { + c.EnableSharedChannels = NewPointer(false) + } + } + + if c.EnableRemoteClusterService == nil { + if isUpdate && e.EnableRemoteClusterService != nil { + c.EnableRemoteClusterService = e.EnableRemoteClusterService + } else { + c.EnableRemoteClusterService = NewPointer(false) + } + } + + if c.DisableSharedChannelsStatusSync == nil { + c.DisableSharedChannelsStatusSync = NewPointer(false) + } + + if c.SyncUsersOnConnectionOpen == nil { + c.SyncUsersOnConnectionOpen = NewPointer(false) + } + + if c.GlobalUserSyncBatchSize == nil { + c.GlobalUserSyncBatchSize = NewPointer(25) // Default to MaxUsersPerSync + } + + if c.MaxPostsPerSync == nil 
{ + c.MaxPostsPerSync = NewPointer(ConnectedWorkspacesSettingsDefaultMaxPostsPerSync) + } + + if c.MemberSyncBatchSize == nil { + c.MemberSyncBatchSize = NewPointer(ConnectedWorkspacesSettingsDefaultMemberSyncBatchSize) + } +} + +type GlobalRelayMessageExportSettings struct { + CustomerType *string `access:"compliance_compliance_export"` // must be either A9, A10 or CUSTOM, dictates SMTP server url + SMTPUsername *string `access:"compliance_compliance_export"` + SMTPPassword *string `access:"compliance_compliance_export"` + EmailAddress *string `access:"compliance_compliance_export"` // the address to send messages to + SMTPServerTimeout *int `access:"compliance_compliance_export"` + CustomSMTPServerName *string `access:"compliance_compliance_export"` + CustomSMTPPort *string `access:"compliance_compliance_export"` +} + +func (s *GlobalRelayMessageExportSettings) SetDefaults() { + if s.CustomerType == nil { + s.CustomerType = NewPointer(GlobalrelayCustomerTypeA9) + } + if s.SMTPUsername == nil { + s.SMTPUsername = NewPointer("") + } + if s.SMTPPassword == nil { + s.SMTPPassword = NewPointer("") + } + if s.EmailAddress == nil { + s.EmailAddress = NewPointer("") + } + if s.SMTPServerTimeout == nil || *s.SMTPServerTimeout == 0 { + s.SMTPServerTimeout = NewPointer(1800) + } + if s.CustomSMTPServerName == nil { + s.CustomSMTPServerName = NewPointer("") + } + if s.CustomSMTPPort == nil { + s.CustomSMTPPort = NewPointer("25") + } +} + +type MessageExportSettings struct { + EnableExport *bool `access:"compliance_compliance_export"` + ExportFormat *string `access:"compliance_compliance_export"` + DailyRunTime *string `access:"compliance_compliance_export"` + ExportFromTimestamp *int64 `access:"compliance_compliance_export"` + BatchSize *int `access:"compliance_compliance_export"` + DownloadExportResults *bool `access:"compliance_compliance_export"` + ChannelBatchSize *int `access:"compliance_compliance_export"` + ChannelHistoryBatchSize *int 
`access:"compliance_compliance_export"` + + // formatter-specific settings - these are only expected to be non-nil if ExportFormat is set to the associated format + GlobalRelaySettings *GlobalRelayMessageExportSettings `access:"compliance_compliance_export"` +} + +func (s *MessageExportSettings) SetDefaults() { + if s.EnableExport == nil { + s.EnableExport = NewPointer(false) + } + + if s.DownloadExportResults == nil { + s.DownloadExportResults = NewPointer(false) + } + + if s.ExportFormat == nil { + s.ExportFormat = NewPointer(ComplianceExportTypeActiance) + } + + if s.DailyRunTime == nil { + s.DailyRunTime = NewPointer("01:00") + } + + if s.ExportFromTimestamp == nil { + s.ExportFromTimestamp = NewPointer(int64(0)) + } + + if s.BatchSize == nil { + s.BatchSize = NewPointer(10000) + } + + if s.ChannelBatchSize == nil || *s.ChannelBatchSize == 0 { + s.ChannelBatchSize = NewPointer(ComplianceExportChannelBatchSizeDefault) + } + + if s.ChannelHistoryBatchSize == nil || *s.ChannelHistoryBatchSize == 0 { + s.ChannelHistoryBatchSize = NewPointer(ComplianceExportChannelHistoryBatchSizeDefault) + } + + if s.GlobalRelaySettings == nil { + s.GlobalRelaySettings = &GlobalRelayMessageExportSettings{} + } + s.GlobalRelaySettings.SetDefaults() +} + +type DisplaySettings struct { + CustomURLSchemes []string `access:"site_posts"` + MaxMarkdownNodes *int `access:"site_posts"` +} + +func (s *DisplaySettings) SetDefaults() { + if s.CustomURLSchemes == nil { + customURLSchemes := []string{} + s.CustomURLSchemes = customURLSchemes + } + + if s.MaxMarkdownNodes == nil { + s.MaxMarkdownNodes = NewPointer(0) + } +} + +type GuestAccountsSettings struct { + Enable *bool `access:"authentication_guest_access"` + HideTags *bool `access:"authentication_guest_access"` + AllowEmailAccounts *bool `access:"authentication_guest_access"` + EnforceMultifactorAuthentication *bool `access:"authentication_guest_access"` + RestrictCreationToDomains *string `access:"authentication_guest_access"` +} + 
+func (s *GuestAccountsSettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.HideTags == nil { + s.HideTags = NewPointer(false) + } + + if s.AllowEmailAccounts == nil { + s.AllowEmailAccounts = NewPointer(true) + } + + if s.EnforceMultifactorAuthentication == nil { + s.EnforceMultifactorAuthentication = NewPointer(false) + } + + if s.RestrictCreationToDomains == nil { + s.RestrictCreationToDomains = NewPointer("") + } +} + +type ImageProxySettings struct { + Enable *bool `access:"environment_image_proxy"` + ImageProxyType *string `access:"environment_image_proxy"` + RemoteImageProxyURL *string `access:"environment_image_proxy"` + RemoteImageProxyOptions *string `access:"environment_image_proxy"` +} + +func (s *ImageProxySettings) SetDefaults() { + if s.Enable == nil { + s.Enable = NewPointer(false) + } + + if s.ImageProxyType == nil { + s.ImageProxyType = NewPointer(ImageProxyTypeLocal) + } + + if s.RemoteImageProxyURL == nil { + s.RemoteImageProxyURL = NewPointer("") + } + + if s.RemoteImageProxyOptions == nil { + s.RemoteImageProxyOptions = NewPointer("") + } +} + +// ImportSettings defines configuration settings for file imports. +type ImportSettings struct { + // The directory where to store the imported files. + Directory *string `access:"cloud_restrictable"` + // The number of days to retain the imported files before deleting them. + RetentionDays *int +} + +func (s *ImportSettings) isValid() *AppError { + if *s.Directory == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.import.directory.app_error", nil, "", http.StatusBadRequest) + } + + if *s.RetentionDays <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.import.retention_days_too_low.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +// SetDefaults applies the default settings to the struct. 
+func (s *ImportSettings) SetDefaults() { + if s.Directory == nil || *s.Directory == "" { + s.Directory = NewPointer(ImportSettingsDefaultDirectory) + } + + if s.RetentionDays == nil { + s.RetentionDays = NewPointer(ImportSettingsDefaultRetentionDays) + } +} + +// ExportSettings defines configuration settings for file exports. +type ExportSettings struct { + // The directory where to store the exported files. + Directory *string `access:"cloud_restrictable"` // telemetry: none + // The number of days to retain the exported files before deleting them. + RetentionDays *int +} + +func (s *ExportSettings) isValid() *AppError { + if *s.Directory == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.export.directory.app_error", nil, "", http.StatusBadRequest) + } + + if *s.RetentionDays <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.export.retention_days_too_low.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +// SetDefaults applies the default settings to the struct. 
+func (s *ExportSettings) SetDefaults() { + if s.Directory == nil || *s.Directory == "" { + s.Directory = NewPointer(ExportSettingsDefaultDirectory) + } + + if s.RetentionDays == nil { + s.RetentionDays = NewPointer(ExportSettingsDefaultRetentionDays) + } +} + +type AccessControlSettings struct { + EnableAttributeBasedAccessControl *bool + EnableChannelScopeAccessControl *bool + EnableUserManagedAttributes *bool `access:"write_restrictable"` +} + +func (s *AccessControlSettings) SetDefaults() { + if s.EnableAttributeBasedAccessControl == nil { + s.EnableAttributeBasedAccessControl = NewPointer(false) + } + + if s.EnableChannelScopeAccessControl == nil { + s.EnableChannelScopeAccessControl = NewPointer(true) + } + + if s.EnableUserManagedAttributes == nil { + s.EnableUserManagedAttributes = NewPointer(false) + } +} + +type ConfigFunc func() *Config + +const ( + ConfigAccessTagType = "access" + ConfigAccessTagWriteRestrictable = "write_restrictable" + ConfigAccessTagCloudRestrictable = "cloud_restrictable" +) + +// Allows read access if any PermissionSysconsoleRead* is allowed +const ConfigAccessTagAnySysConsoleRead = "*_read" + +// Config fields support the 'access' tag with the following values corresponding to the suffix of the associated +// PermissionSysconsole* permission Id: 'about', 'reporting', 'user_management_users', +// 'user_management_groups', 'user_management_teams', 'user_management_channels', +// 'user_management_permissions', 'environment_web_server', 'environment_database', 'environment_elasticsearch', +// 'environment_file_storage', 'environment_image_proxy', 'environment_smtp', 'environment_push_notification_server', +// 'environment_high_availability', 'environment_rate_limiting', 'environment_logging', 'environment_session_lengths', +// 'environment_performance_monitoring', 'environment_developer', 'site', 'authentication', 'plugins', +// 'integrations', 'compliance', 'plugins', and 'experimental'. 
They grant read and/or write access to the config field +// to roles without PermissionManageSystem. +// +// The 'access' tag '*_read' checks for any Sysconsole read permission and grants access if any read permission is allowed. +// +// By default config values can be written with PermissionManageSystem, but if ExperimentalSettings.RestrictSystemAdmin is true +// and the access tag contains the value 'write_restrictable', then even PermissionManageSystem, does not grant write access +// unless the request is made using local mode. +// +// PermissionManageSystem always grants read access. +// +// Config values with the access tag 'cloud_restrictable' mean that are marked to be filtered when it's used in a cloud licensed +// environment with ExperimentalSettings.RestrictedSystemAdmin set to true. +// +// Example: +// +// type HairSettings struct { +// // Colour is writeable with either PermissionSysconsoleWriteReporting or PermissionSysconsoleWriteUserManagementGroups. +// // It is readable by PermissionSysconsoleReadReporting and PermissionSysconsoleReadUserManagementGroups permissions. +// // PermissionManageSystem grants read and write access. +// Colour string `access:"reporting,user_management_groups"` +// +// // Length is only readable and writable via PermissionManageSystem. +// Length string +// +// // Product is only writeable by PermissionManageSystem if ExperimentalSettings.RestrictSystemAdmin is false. +// // PermissionManageSystem can always read the value. 
+// Product bool `access:write_restrictable` +// } +type Config struct { + ServiceSettings ServiceSettings + TeamSettings TeamSettings + ClientRequirements ClientRequirements + SqlSettings SqlSettings + LogSettings LogSettings + ExperimentalAuditSettings ExperimentalAuditSettings + PasswordSettings PasswordSettings + FileSettings FileSettings + EmailSettings EmailSettings + RateLimitSettings RateLimitSettings + PrivacySettings PrivacySettings + SupportSettings SupportSettings + AnnouncementSettings AnnouncementSettings + ThemeSettings ThemeSettings + GitLabSettings SSOSettings + GoogleSettings SSOSettings + Office365Settings Office365Settings + OpenIdSettings SSOSettings + LdapSettings LdapSettings + ComplianceSettings ComplianceSettings + LocalizationSettings LocalizationSettings + SamlSettings SamlSettings + NativeAppSettings NativeAppSettings + CacheSettings CacheSettings + ClusterSettings ClusterSettings + MetricsSettings MetricsSettings + ExperimentalSettings ExperimentalSettings + AnalyticsSettings AnalyticsSettings + ElasticsearchSettings ElasticsearchSettings + DataRetentionSettings DataRetentionSettings + MessageExportSettings MessageExportSettings + JobSettings JobSettings + PluginSettings PluginSettings + DisplaySettings DisplaySettings + GuestAccountsSettings GuestAccountsSettings + ImageProxySettings ImageProxySettings + CloudSettings CloudSettings // telemetry: none + FeatureFlags *FeatureFlags `access:"*_read" json:",omitempty"` + ImportSettings ImportSettings // telemetry: none + ExportSettings ExportSettings + WranglerSettings WranglerSettings + ConnectedWorkspacesSettings ConnectedWorkspacesSettings + AccessControlSettings AccessControlSettings + ContentFlaggingSettings ContentFlaggingSettings + AutoTranslationSettings AutoTranslationSettings +} + +func (o *Config) Auditable() map[string]any { + return map[string]any{ + // TODO + } +} + +func (o *Config) Clone() *Config { + buf, err := json.Marshal(o) + if err != nil { + panic(err) + } + var ret 
Config + err = json.Unmarshal(buf, &ret) + if err != nil { + panic(err) + } + return &ret +} + +func (o *Config) ToJSONFiltered(tagType, tagValue string) ([]byte, error) { + filteredConfigMap := configToMapFilteredByTag(*o, tagType, tagValue) + for key, value := range filteredConfigMap { + v, ok := value.(map[string]any) + if ok && len(v) == 0 { + delete(filteredConfigMap, key) + } + } + return json.Marshal(filteredConfigMap) +} + +func (o *Config) GetSSOService(service string) *SSOSettings { + switch service { + case ServiceGitlab: + return &o.GitLabSettings + case ServiceGoogle: + return &o.GoogleSettings + case ServiceOffice365: + return o.Office365Settings.SSOSettings() + case ServiceOpenid: + return &o.OpenIdSettings + } + + return nil +} + +func ConfigFromJSON(data io.Reader) *Config { + var o *Config + json.NewDecoder(data).Decode(&o) + return o +} + +// isUpdate detects a pre-existing config based on whether SiteURL has been changed +func (o *Config) isUpdate() bool { + return o.ServiceSettings.SiteURL != nil +} + +func (o *Config) SetDefaults() { + isUpdate := o.isUpdate() + + o.LdapSettings.SetDefaults() + o.SamlSettings.SetDefaults() + + if o.TeamSettings.TeammateNameDisplay == nil { + o.TeamSettings.TeammateNameDisplay = NewPointer(ShowUsername) + + if *o.SamlSettings.Enable || *o.LdapSettings.Enable { + *o.TeamSettings.TeammateNameDisplay = ShowFullName + } + } + + o.SqlSettings.SetDefaults(isUpdate) + o.FileSettings.SetDefaults(isUpdate) + o.EmailSettings.SetDefaults(isUpdate) + o.PrivacySettings.setDefaults() + o.Office365Settings.setDefaults() + o.Office365Settings.setDefaults() + o.GitLabSettings.setDefaults("", "", "", "", "") + o.GoogleSettings.setDefaults(GoogleSettingsDefaultScope, GoogleSettingsDefaultAuthEndpoint, GoogleSettingsDefaultTokenEndpoint, GoogleSettingsDefaultUserAPIEndpoint, "") + o.OpenIdSettings.setDefaults(OpenidSettingsDefaultScope, "", "", "", "#145DBF") + o.ServiceSettings.SetDefaults(isUpdate) + 
o.PasswordSettings.SetDefaults() + o.TeamSettings.SetDefaults() + o.MetricsSettings.SetDefaults() + o.ExperimentalSettings.SetDefaults() + o.SupportSettings.SetDefaults() + o.AnnouncementSettings.SetDefaults() + o.ThemeSettings.SetDefaults() + o.CacheSettings.SetDefaults() + o.ClusterSettings.SetDefaults() + o.PluginSettings.SetDefaults(o.LogSettings) + o.AnalyticsSettings.SetDefaults() + o.ComplianceSettings.SetDefaults() + o.LocalizationSettings.SetDefaults() + o.AutoTranslationSettings.SetDefaults() + o.ElasticsearchSettings.SetDefaults() + o.NativeAppSettings.SetDefaults() + o.DataRetentionSettings.SetDefaults() + o.RateLimitSettings.SetDefaults() + o.LogSettings.SetDefaults() + o.ExperimentalAuditSettings.SetDefaults() + o.JobSettings.SetDefaults() + o.MessageExportSettings.SetDefaults() + o.DisplaySettings.SetDefaults() + o.GuestAccountsSettings.SetDefaults() + o.ImageProxySettings.SetDefaults() + o.CloudSettings.SetDefaults() + if o.FeatureFlags == nil { + o.FeatureFlags = &FeatureFlags{} + o.FeatureFlags.SetDefaults() + } + o.ImportSettings.SetDefaults() + o.ExportSettings.SetDefaults() + o.WranglerSettings.SetDefaults() + o.ConnectedWorkspacesSettings.SetDefaults(isUpdate, o.ExperimentalSettings) + o.AccessControlSettings.SetDefaults() + o.ContentFlaggingSettings.SetDefaults() +} + +func (o *Config) IsValid() *AppError { + if *o.ServiceSettings.SiteURL == "" && *o.EmailSettings.EnableEmailBatching { + return NewAppError("Config.IsValid", "model.config.is_valid.site_url_email_batching.app_error", nil, "", http.StatusBadRequest) + } + + if *o.ClusterSettings.Enable && *o.EmailSettings.EnableEmailBatching { + return NewAppError("Config.IsValid", "model.config.is_valid.cluster_email_batching.app_error", nil, "", http.StatusBadRequest) + } + + if appErr := o.MetricsSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.CacheSettings.isValid(); appErr != nil { + return appErr + } + + if *o.ServiceSettings.SiteURL == "" && 
*o.ServiceSettings.AllowCookiesForSubdomains { + return NewAppError("Config.IsValid", "model.config.is_valid.allow_cookies_for_subdomains.app_error", nil, "", http.StatusBadRequest) + } + + if appErr := o.TeamSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.ExperimentalSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.SqlSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.FileSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.EmailSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.LdapSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.SamlSettings.isValid(); appErr != nil { + return appErr + } + + if *o.PasswordSettings.MinimumLength < PasswordMinimumLength || *o.PasswordSettings.MinimumLength > PasswordMaximumLength { + return NewAppError("Config.IsValid", "model.config.is_valid.password_length.app_error", map[string]any{"MinLength": PasswordMinimumLength, "MaxLength": PasswordMaximumLength}, "", http.StatusBadRequest) + } + + if appErr := o.RateLimitSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.ServiceSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.ElasticsearchSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.DataRetentionSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.LogSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.ExperimentalAuditSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.LocalizationSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.AutoTranslationSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.MessageExportSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.DisplaySettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.ImageProxySettings.isValid(); appErr 
!= nil { + return appErr + } + + if appErr := o.ImportSettings.isValid(); appErr != nil { + return appErr + } + + if appErr := o.WranglerSettings.IsValid(); appErr != nil { + return appErr + } + + if o.SupportSettings.ReportAProblemType != nil { + if *o.SupportSettings.ReportAProblemType == SupportSettingsReportAProblemTypeMail { + if o.SupportSettings.ReportAProblemMail == nil { + return NewAppError("Config.IsValid", "model.config.is_valid.report_a_problem_mail.missing.app_error", nil, "", http.StatusBadRequest) + } + if !IsValidEmail(*o.SupportSettings.ReportAProblemMail) { + return NewAppError("Config.IsValid", "model.config.is_valid.report_a_problem_mail.invalid.app_error", nil, "", http.StatusBadRequest) + } + } + if *o.SupportSettings.ReportAProblemType == SupportSettingsReportAProblemTypeLink { + if o.SupportSettings.ReportAProblemLink == nil { + return NewAppError("Config.IsValid", "model.config.is_valid.report_a_problem_link.missing.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidHTTPURL(*o.SupportSettings.ReportAProblemLink) { + return NewAppError("Config.IsValid", "model.config.is_valid.report_a_problem_link.invalid.app_error", nil, "", http.StatusBadRequest) + } + } + } + + if appErr := o.ContentFlaggingSettings.IsValid(); appErr != nil { + return appErr + } + + return nil +} + +func (s *TeamSettings) isValid() *AppError { + if *s.MaxUsersPerTeam <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.max_users.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaxChannelsPerTeam <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.max_channels.app_error", nil, "", http.StatusBadRequest) + } + + if *s.UserStatusAwayTimeout <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.user_status_away_timeout.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaxNotificationsPerChannel <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.max_notify_per_channel.app_error", 
nil, "", http.StatusBadRequest) + } + + if !(*s.RestrictDirectMessage == DirectMessageAny || *s.RestrictDirectMessage == DirectMessageTeam) { + return NewAppError("Config.IsValid", "model.config.is_valid.restrict_direct_message.app_error", nil, "", http.StatusBadRequest) + } + + if !(*s.TeammateNameDisplay == ShowFullName || *s.TeammateNameDisplay == ShowNicknameFullName || *s.TeammateNameDisplay == ShowUsername) { + return NewAppError("Config.IsValid", "model.config.is_valid.teammate_name_display.app_error", nil, "", http.StatusBadRequest) + } + + if len(*s.SiteName) > SitenameMaxLength { + return NewAppError("Config.IsValid", "model.config.is_valid.sitename_length.app_error", map[string]any{"MaxLength": SitenameMaxLength}, "", http.StatusBadRequest) + } + + if !*s.ExperimentalViewArchivedChannels { + return NewAppError("Config.IsValid", "model.config.is_valid.experimental_view_archived_channels.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (s *ExperimentalSettings) isValid() *AppError { + if *s.ClientSideCertEnable { + return NewAppError("Config.IsValid", "model.config.is_valid.client_side_cert_enable.app_error", nil, "", http.StatusBadRequest) + } + + if *s.LinkMetadataTimeoutMilliseconds <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.link_metadata_timeout.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (s *SqlSettings) isValid() *AppError { + if *s.AtRestEncryptKey != "" && len(*s.AtRestEncryptKey) < 32 { + return NewAppError("Config.IsValid", "model.config.is_valid.encrypt_sql.app_error", nil, "", http.StatusBadRequest) + } + + if *s.DriverName != DatabaseDriverPostgres { + return NewAppError("Config.IsValid", "model.config.is_valid.sql_driver.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaxIdleConns <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.sql_idle.app_error", nil, "", http.StatusBadRequest) + } + + if *s.ConnMaxLifetimeMilliseconds < 0 { + 
return NewAppError("Config.IsValid", "model.config.is_valid.sql_conn_max_lifetime_milliseconds.app_error", nil, "", http.StatusBadRequest) + } + + if *s.ConnMaxIdleTimeMilliseconds < 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.sql_conn_max_idle_time_milliseconds.app_error", nil, "", http.StatusBadRequest) + } + + if *s.QueryTimeout <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.sql_query_timeout.app_error", nil, "", http.StatusBadRequest) + } + + if *s.DataSource == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.sql_data_src.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaxOpenConns <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.sql_max_conn.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (s *FileSettings) isValid() *AppError { + if *s.MaxFileSize <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.max_file_size.app_error", nil, "", http.StatusBadRequest) + } + + if !(*s.DriverName == ImageDriverLocal || *s.DriverName == ImageDriverS3) { + return NewAppError("Config.IsValid", "model.config.is_valid.file_driver.app_error", nil, "", http.StatusBadRequest) + } + + if *s.PublicLinkSalt != "" && len(*s.PublicLinkSalt) < 32 { + return NewAppError("Config.IsValid", "model.config.is_valid.file_salt.app_error", nil, "", http.StatusBadRequest) + } + + if *s.Directory == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.directory.app_error", nil, "", http.StatusBadRequest) + } + + // Check for leading/trailing whitespace in directory path + if strings.TrimSpace(*s.Directory) != *s.Directory { + return NewAppError("Config.IsValid", "model.config.is_valid.directory_whitespace.app_error", map[string]any{"Setting": "FileSettings.Directory", "Value": *s.Directory}, "", http.StatusBadRequest) + } + + if *s.MaxImageDecoderConcurrency < -1 || *s.MaxImageDecoderConcurrency == 0 { + return NewAppError("Config.IsValid", 
"model.config.is_valid.image_decoder_concurrency.app_error", map[string]any{"Value": *s.MaxImageDecoderConcurrency}, "", http.StatusBadRequest) + } + + if *s.AmazonS3RequestTimeoutMilliseconds <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.amazons3_timeout.app_error", map[string]any{"Value": *s.MaxImageDecoderConcurrency}, "", http.StatusBadRequest) + } + + if *s.AmazonS3StorageClass != "" && !slices.Contains([]string{StorageClassStandard, StorageClassReducedRedundancy, StorageClassStandardIA, StorageClassOnezoneIA, StorageClassIntelligentTiering, StorageClassGlacier, StorageClassDeepArchive, StorageClassOutposts, StorageClassGlacierIR, StorageClassSnow, StorageClassExpressOnezone}, *s.AmazonS3StorageClass) { + return NewAppError("Config.IsValid", "model.config.is_valid.storage_class.app_error", map[string]any{"Value": *s.AmazonS3StorageClass}, "", http.StatusBadRequest) + } + + if strings.TrimSpace(*s.AmazonS3PathPrefix) != *s.AmazonS3PathPrefix { + return NewAppError("Config.IsValid", "model.config.is_valid.directory_whitespace.app_error", map[string]any{"Setting": "FileSettings.AmazonS3PathPrefix", "Value": *s.AmazonS3PathPrefix}, "", http.StatusBadRequest) + } + + if *s.ExportAmazonS3StorageClass != "" && !slices.Contains([]string{StorageClassStandard, StorageClassReducedRedundancy, StorageClassStandardIA, StorageClassOnezoneIA, StorageClassIntelligentTiering, StorageClassGlacier, StorageClassDeepArchive, StorageClassOutposts, StorageClassGlacierIR, StorageClassSnow, StorageClassExpressOnezone}, *s.ExportAmazonS3StorageClass) { + return NewAppError("Config.IsValid", "model.config.is_valid.storage_class.app_error", map[string]any{"Value": *s.ExportAmazonS3StorageClass}, "", http.StatusBadRequest) + } + + if strings.TrimSpace(*s.ExportAmazonS3PathPrefix) != *s.ExportAmazonS3PathPrefix { + return NewAppError("Config.IsValid", "model.config.is_valid.directory_whitespace.app_error", map[string]any{"Setting": 
"FileSettings.ExportAmazonS3PathPrefix", "Value": *s.ExportAmazonS3PathPrefix}, "", http.StatusBadRequest) + } + + if strings.TrimSpace(*s.ExportDirectory) != *s.ExportDirectory { + return NewAppError("Config.IsValid", "model.config.is_valid.directory_whitespace.app_error", map[string]any{"Setting": "FileSettings.ExportDirectory", "Value": *s.ExportDirectory}, "", http.StatusBadRequest) + } + + return nil +} + +func (s *EmailSettings) isValid() *AppError { + if !(*s.ConnectionSecurity == ConnSecurityNone || *s.ConnectionSecurity == ConnSecurityTLS || *s.ConnectionSecurity == ConnSecurityStarttls || *s.ConnectionSecurity == ConnSecurityPlain) { + return NewAppError("Config.IsValid", "model.config.is_valid.email_security.app_error", nil, "", http.StatusBadRequest) + } + + if *s.EmailBatchingBufferSize <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.email_batching_buffer_size.app_error", nil, "", http.StatusBadRequest) + } + + if *s.EmailBatchingInterval < 30 { + return NewAppError("Config.IsValid", "model.config.is_valid.email_batching_interval.app_error", nil, "", http.StatusBadRequest) + } + + if !(*s.EmailNotificationContentsType == EmailNotificationContentsFull || *s.EmailNotificationContentsType == EmailNotificationContentsGeneric) { + return NewAppError("Config.IsValid", "model.config.is_valid.email_notification_contents_type.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (s *RateLimitSettings) isValid() *AppError { + if *s.MemoryStoreSize <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.rate_mem.app_error", nil, "", http.StatusBadRequest) + } + + if *s.PerSec <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.rate_sec.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaxBurst <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.max_burst.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (s *LdapSettings) isValid() *AppError { + 
if !(*s.ConnectionSecurity == ConnSecurityNone || *s.ConnectionSecurity == ConnSecurityTLS || *s.ConnectionSecurity == ConnSecurityStarttls) { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_security.app_error", nil, "", http.StatusBadRequest) + } + + if *s.SyncIntervalMinutes <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_sync_interval.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaxPageSize < 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_max_page_size.app_error", nil, "", http.StatusBadRequest) + } + + if *s.Enable { + if *s.LdapServer == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_server", nil, "", http.StatusBadRequest) + } + + if *s.MaximumLoginAttempts <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_max_login_attempts.app_error", nil, "", http.StatusBadRequest) + } + + if *s.BaseDN == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_basedn", nil, "", http.StatusBadRequest) + } + + if *s.EmailAttribute == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_email", nil, "", http.StatusBadRequest) + } + + if *s.UsernameAttribute == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_username", nil, "", http.StatusBadRequest) + } + + if *s.IdAttribute == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_id", nil, "", http.StatusBadRequest) + } + + if *s.LoginIdAttribute == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.ldap_login_id", nil, "", http.StatusBadRequest) + } + + if *s.UserFilter != "" { + if _, err := ldap.CompileFilter(*s.UserFilter); err != nil { + return NewAppError("ValidateFilter", "ent.ldap.validate_filter.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + } + + if *s.GuestFilter != "" { + if _, err := ldap.CompileFilter(*s.GuestFilter); err != nil { + return NewAppError("LdapSettings.isValid", 
"ent.ldap.validate_guest_filter.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + } + + if *s.AdminFilter != "" { + if _, err := ldap.CompileFilter(*s.AdminFilter); err != nil { + return NewAppError("LdapSettings.isValid", "ent.ldap.validate_admin_filter.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + } + } + + return nil +} + +func (s *SamlSettings) isValid() *AppError { + if *s.Enable { + if *s.IdpURL == "" || !IsValidHTTPURL(*s.IdpURL) { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_idp_url.app_error", nil, "", http.StatusBadRequest) + } + + if *s.IdpDescriptorURL == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_idp_descriptor_url.app_error", nil, "", http.StatusBadRequest) + } + + if *s.IdpCertificateFile == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_idp_cert.app_error", nil, "", http.StatusBadRequest) + } + + if *s.EmailAttribute == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_email_attribute.app_error", nil, "", http.StatusBadRequest) + } + + if *s.UsernameAttribute == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_username_attribute.app_error", nil, "", http.StatusBadRequest) + } + + if *s.ServiceProviderIdentifier == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_spidentifier_attribute.app_error", nil, "", http.StatusBadRequest) + } + + if *s.Verify { + if *s.AssertionConsumerServiceURL == "" || !IsValidHTTPURL(*s.AssertionConsumerServiceURL) { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_assertion_consumer_service_url.app_error", nil, "", http.StatusBadRequest) + } + } + + if *s.Encrypt { + if *s.PrivateKeyFile == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_private_key.app_error", nil, "", http.StatusBadRequest) + } + + if *s.PublicCertificateFile == "" { + return NewAppError("Config.IsValid", 
"model.config.is_valid.saml_public_cert.app_error", nil, "", http.StatusBadRequest) + } + } + + if *s.EmailAttribute == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_email_attribute.app_error", nil, "", http.StatusBadRequest) + } + + if !(*s.SignatureAlgorithm == SamlSettingsSignatureAlgorithmSha1 || *s.SignatureAlgorithm == SamlSettingsSignatureAlgorithmSha256 || *s.SignatureAlgorithm == SamlSettingsSignatureAlgorithmSha512) { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_signature_algorithm.app_error", nil, "", http.StatusBadRequest) + } + if !(*s.CanonicalAlgorithm == SamlSettingsCanonicalAlgorithmC14n || *s.CanonicalAlgorithm == SamlSettingsCanonicalAlgorithmC14n11) { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_canonical_algorithm.app_error", nil, "", http.StatusBadRequest) + } + + if *s.GuestAttribute != "" { + if !(strings.Contains(*s.GuestAttribute, "=")) { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_guest_attribute.app_error", nil, "", http.StatusBadRequest) + } + if len(strings.Split(*s.GuestAttribute, "=")) != 2 { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_guest_attribute.app_error", nil, "", http.StatusBadRequest) + } + } + + if *s.AdminAttribute != "" { + if !(strings.Contains(*s.AdminAttribute, "=")) { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_admin_attribute.app_error", nil, "", http.StatusBadRequest) + } + if len(strings.Split(*s.AdminAttribute, "=")) != 2 { + return NewAppError("Config.IsValid", "model.config.is_valid.saml_admin_attribute.app_error", nil, "", http.StatusBadRequest) + } + } + } + + return nil +} + +func (s *ServiceSettings) isValid() *AppError { + if !(*s.ConnectionSecurity == ConnSecurityNone || *s.ConnectionSecurity == ConnSecurityTLS) { + return NewAppError("Config.IsValid", "model.config.is_valid.webserver_security.app_error", nil, "", http.StatusBadRequest) + } + + if *s.ConnectionSecurity 
== ConnSecurityTLS && !*s.UseLetsEncrypt { + appErr := NewAppError("Config.IsValid", "model.config.is_valid.tls_cert_file_missing.app_error", nil, "", http.StatusBadRequest) + + if *s.TLSCertFile == "" { + return appErr + } else if _, err := os.Stat(*s.TLSCertFile); os.IsNotExist(err) { + return appErr + } + + appErr = NewAppError("Config.IsValid", "model.config.is_valid.tls_key_file_missing.app_error", nil, "", http.StatusBadRequest) + + if *s.TLSKeyFile == "" { + return appErr + } else if _, err := os.Stat(*s.TLSKeyFile); os.IsNotExist(err) { + return appErr + } + } + + if len(s.TLSOverwriteCiphers) > 0 { + for _, cipher := range s.TLSOverwriteCiphers { + if _, ok := ServerTLSSupportedCiphers[cipher]; !ok { + return NewAppError("Config.IsValid", "model.config.is_valid.tls_overwrite_cipher.app_error", map[string]any{"name": cipher}, "", http.StatusBadRequest) + } + } + } + + if *s.MaximumPayloadSizeBytes <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.max_payload_size.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaximumURLLength <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.max_url_length.app_error", nil, "", http.StatusBadRequest) + } + + if *s.ReadTimeout <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.read_timeout.app_error", nil, "", http.StatusBadRequest) + } + + if *s.WriteTimeout <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.write_timeout.app_error", nil, "", http.StatusBadRequest) + } + + if *s.TimeBetweenUserTypingUpdatesMilliseconds < 1000 { + return NewAppError("Config.IsValid", "model.config.is_valid.time_between_user_typing.app_error", nil, "", http.StatusBadRequest) + } + + if *s.MaximumLoginAttempts <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.login_attempts.app_error", nil, "", http.StatusBadRequest) + } + + if *s.SiteURL != "" { + if _, err := url.ParseRequestURI(*s.SiteURL); err != nil { + return 
NewAppError("Config.IsValid", "model.config.is_valid.site_url.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + } + + if *s.WebsocketURL != "" { + if _, err := url.ParseRequestURI(*s.WebsocketURL); err != nil { + return NewAppError("Config.IsValid", "model.config.is_valid.websocket_url.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + } + + host, port, _ := net.SplitHostPort(*s.ListenAddress) + var isValidHost bool + if host == "" { + isValidHost = true + } else { + isValidHost = (net.ParseIP(host) != nil) || isDomainName(host) + } + portInt, err := strconv.Atoi(port) + if err != nil || !isValidHost || portInt < 0 || portInt > math.MaxUint16 { + return NewAppError("Config.IsValid", "model.config.is_valid.listen_address.app_error", nil, "", http.StatusBadRequest) + } + + if *s.OutgoingIntegrationRequestsTimeout <= 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.outgoing_integrations_request_timeout.app_error", nil, "", http.StatusBadRequest) + } + + if *s.ExperimentalGroupUnreadChannels != GroupUnreadChannelsDisabled && + *s.ExperimentalGroupUnreadChannels != GroupUnreadChannelsDefaultOn && + *s.ExperimentalGroupUnreadChannels != GroupUnreadChannelsDefaultOff { + return NewAppError("Config.IsValid", "model.config.is_valid.group_unread_channels.app_error", nil, "", http.StatusBadRequest) + } + + if *s.CollapsedThreads != CollapsedThreadsDisabled && !*s.ThreadAutoFollow { + return NewAppError("Config.IsValid", "model.config.is_valid.collapsed_threads.autofollow.app_error", nil, "", http.StatusBadRequest) + } + + if *s.CollapsedThreads != CollapsedThreadsDisabled && + *s.CollapsedThreads != CollapsedThreadsDefaultOn && + *s.CollapsedThreads != CollapsedThreadsAlwaysOn && + *s.CollapsedThreads != CollapsedThreadsDefaultOff { + return NewAppError("Config.IsValid", "model.config.is_valid.collapsed_threads.app_error", nil, "", http.StatusBadRequest) + } + + if *s.PersistentNotificationIntervalMinutes < 2 { + return 
// isValid validates the Elasticsearch settings, returning an *AppError for
// the first violated constraint, or nil when the settings are valid.
func (s *ElasticsearchSettings) isValid() *AppError {
	// A connection URL is only required when indexing is actually enabled.
	if *s.EnableIndexing {
		if *s.ConnectionURL == "" {
			return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.connection_url.app_error", nil, "", http.StatusBadRequest)
		}
	}

	// Searching and autocomplete both read from the index, so neither can be
	// enabled unless indexing is enabled too.
	if *s.EnableSearching && !*s.EnableIndexing {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.enable_searching.app_error", map[string]any{
			"Searching":      "ElasticsearchSettings.EnableSearching",
			"EnableIndexing": "ElasticsearchSettings.EnableIndexing",
		}, "", http.StatusBadRequest)
	}

	if *s.EnableAutocomplete && !*s.EnableIndexing {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.enable_autocomplete.app_error", map[string]any{
			"Autocomplete":   "ElasticsearchSettings.EnableAutocomplete",
			"EnableIndexing": "ElasticsearchSettings.EnableIndexing",
		}, "", http.StatusBadRequest)
	}

	if *s.AggregatePostsAfterDays < 1 {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.aggregate_posts_after_days.app_error", nil, "", http.StatusBadRequest)
	}

	// The aggregator job start time must be a 24-hour wall-clock value, e.g. "03:30".
	if _, err := time.Parse("15:04", *s.PostsAggregatorJobStartTime); err != nil {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.posts_aggregator_job_start_time.app_error", nil, "", http.StatusBadRequest).Wrap(err)
	}

	if *s.LiveIndexingBatchSize < 1 {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.live_indexing_batch_size.app_error", nil, "", http.StatusBadRequest)
	}

	minBatchSize := 1
	if *s.BatchSize < minBatchSize {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.bulk_indexing_batch_size.app_error", map[string]any{"BatchSize": minBatchSize}, "", http.StatusBadRequest)
	}

	if *s.RequestTimeoutSeconds < 1 {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.request_timeout_seconds.app_error", nil, "", http.StatusBadRequest)
	}

	// IgnoredPurgeIndexes is a comma-separated list of index names; a leading
	// '-' (an exclusion pattern) is not supported and is rejected outright.
	if ign := *s.IgnoredPurgeIndexes; ign != "" {
		s := strings.SplitSeq(ign, ",")
		for ix := range s {
			if strings.HasPrefix(ix, "-") {
				return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.ignored_indexes_dash_prefix.app_error", nil, "", http.StatusBadRequest)
			}
		}
	}

	if *s.Backend != ElasticsearchSettingsOSBackend && *s.Backend != ElasticsearchSettingsESBackend {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.invalid_backend.app_error", nil, "", http.StatusBadRequest)
	}

	// A global search prefix requires an index prefix, and the index prefix
	// must itself start with the global search prefix.
	if *s.GlobalSearchPrefix != "" && *s.IndexPrefix == "" {
		return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.empty_index_prefix.app_error", nil, "", http.StatusBadRequest)
	}

	if *s.GlobalSearchPrefix != "" && *s.IndexPrefix != "" {
		if !strings.HasPrefix(*s.IndexPrefix, *s.GlobalSearchPrefix) {
			return NewAppError("Config.IsValid", "model.config.is_valid.elastic_search.incorrect_search_prefix.app_error", map[string]any{"IndexPrefix": *s.IndexPrefix, "GlobalSearchPrefix": *s.GlobalSearchPrefix}, "", http.StatusBadRequest)
		}
	}

	return nil
}
// isValid validates the data retention settings.
//
// Message and file retention windows can each be expressed in days or in
// hours, but not both at once, and at least one of the pair must be non-zero.
// Fields are nil-checked explicitly because validation may run on a config
// that has not had defaults applied.
func (s *DataRetentionSettings) isValid() *AppError {
	if s.MessageRetentionDays == nil || *s.MessageRetentionDays < 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.message_retention_days_too_low.app_error", nil, "", http.StatusBadRequest)
	}

	if s.MessageRetentionHours == nil || *s.MessageRetentionHours < 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.message_retention_hours_too_low.app_error", nil, "", http.StatusBadRequest)
	}

	if s.FileRetentionDays == nil || *s.FileRetentionDays < 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.file_retention_days_too_low.app_error", nil, "", http.StatusBadRequest)
	}

	if s.FileRetentionHours == nil || *s.FileRetentionHours < 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.file_retention_hours_too_low.app_error", nil, "", http.StatusBadRequest)
	}

	// Days and hours are mutually exclusive ways to express the same window.
	if *s.MessageRetentionDays > 0 && *s.MessageRetentionHours > 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.message_retention_misconfiguration.app_error", nil, "", http.StatusBadRequest)
	}

	if *s.FileRetentionDays > 0 && *s.FileRetentionHours > 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.file_retention_misconfiguration.app_error", nil, "", http.StatusBadRequest)
	}

	// ...but exactly one of each pair must be set.
	if *s.MessageRetentionDays == 0 && *s.MessageRetentionHours == 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.message_retention_both_zero.app_error", nil, "", http.StatusBadRequest)
	}

	if *s.FileRetentionDays == 0 && *s.FileRetentionHours == 0 {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.file_retention_both_zero.app_error", nil, "", http.StatusBadRequest)
	}

	// The deletion job start time must be a 24-hour wall-clock value, e.g. "02:00".
	// NOTE(review): DeletionJobStartTime is dereferenced without a nil check,
	// unlike the fields above — presumably defaults guarantee it; confirm.
	if _, err := time.Parse("15:04", *s.DeletionJobStartTime); err != nil {
		return NewAppError("Config.IsValid", "model.config.is_valid.data_retention.deletion_job_start_time.app_error", nil, "", http.StatusBadRequest).Wrap(err)
	}

	return nil
}
// isValid validates the auto-translation settings. When the feature is
// disabled (or the Enable flag is unset) no further validation is performed.
func (s *AutoTranslationSettings) isValid() *AppError {
	if s.Enable == nil || !*s.Enable {
		return nil
	}

	if *s.Provider == "" {
		return NewAppError("Config.IsValid", "model.config.is_valid.autotranslation.provider.app_error", nil, "", http.StatusBadRequest)
	}

	switch *s.Provider {
	case "libretranslate":
		// LibreTranslate requires a well-formed HTTP(S) endpoint URL.
		if s.LibreTranslate == nil || s.LibreTranslate.URL == nil || *s.LibreTranslate.URL == "" || !IsValidHTTPURL(*s.LibreTranslate.URL) {
			return NewAppError("Config.IsValid", "model.config.is_valid.autotranslation.libretranslate.url.app_error", nil, "", http.StatusBadRequest)
		}
	// TODO: Enable Agents provider in future release
	// case "agents":
	// if s.Agents == nil || s.Agents.BotUserId == nil || *s.Agents.BotUserId == "" {
	// return NewAppError("Config.IsValid", "model.config.is_valid.autotranslation.agents.bot_user_id.app_error", nil, "", http.StatusBadRequest)
	// }
	default:
		return NewAppError("Config.IsValid", "model.config.is_valid.autotranslation.provider.unsupported.app_error", nil, "", http.StatusBadRequest)
	}

	// Validate timeouts if set: each configured timeout must be positive.
	if s.TimeoutsMs != nil {
		if s.TimeoutsMs.NewPost != nil && *s.TimeoutsMs.NewPost <= 0 {
			return NewAppError("Config.IsValid", "model.config.is_valid.autotranslation.timeouts.new_post.app_error", nil, "", http.StatusBadRequest)
		}
		if s.TimeoutsMs.Fetch != nil && *s.TimeoutsMs.Fetch <= 0 {
			return NewAppError("Config.IsValid", "model.config.is_valid.autotranslation.timeouts.fetch.app_error", nil, "", http.StatusBadRequest)
		}
		if s.TimeoutsMs.Notification != nil && *s.TimeoutsMs.Notification <= 0 {
			return NewAppError("Config.IsValid", "model.config.is_valid.autotranslation.timeouts.notification.app_error", nil, "", http.StatusBadRequest)
		}
	}

	return nil
}
// isValid validates the compliance message-export settings. When exporting is
// disabled only the presence of the EnableExport flag itself is checked; all
// other constraints apply only while exporting is enabled.
func (s *MessageExportSettings) isValid() *AppError {
	if s.EnableExport == nil {
		return NewAppError("Config.IsValid", "model.config.is_valid.message_export.enable.app_error", nil, "", http.StatusBadRequest)
	}
	if *s.EnableExport {
		// The export start timestamp must be non-negative and not in the future.
		if s.ExportFromTimestamp == nil || *s.ExportFromTimestamp < 0 || *s.ExportFromTimestamp > GetMillis() {
			return NewAppError("Config.IsValid", "model.config.is_valid.message_export.export_from.app_error", nil, "", http.StatusBadRequest)
		} else if s.DailyRunTime == nil {
			return NewAppError("Config.IsValid", "model.config.is_valid.message_export.daily_runtime.app_error", nil, "", http.StatusBadRequest)
		} else if _, err := time.Parse("15:04", *s.DailyRunTime); err != nil {
			// The daily run time must be a 24-hour wall-clock value, e.g. "02:00".
			return NewAppError("Config.IsValid", "model.config.is_valid.message_export.daily_runtime.app_error", nil, "", http.StatusBadRequest).Wrap(err)
		} else if s.BatchSize == nil || *s.BatchSize < 0 {
			return NewAppError("Config.IsValid", "model.config.is_valid.message_export.batch_size.app_error", nil, "", http.StatusBadRequest)
		} else if s.ExportFormat == nil || (*s.ExportFormat != ComplianceExportTypeActiance && *s.ExportFormat != ComplianceExportTypeGlobalrelay && *s.ExportFormat != ComplianceExportTypeCsv && *s.ExportFormat != ComplianceExportTypeGlobalrelayZip) {
			return NewAppError("Config.IsValid", "model.config.is_valid.message_export.export_type.app_error", nil, "", http.StatusBadRequest)
		}

		// The GlobalRelay format requires its own nested settings block.
		if *s.ExportFormat == ComplianceExportTypeGlobalrelay {
			if s.GlobalRelaySettings == nil {
				return NewAppError("Config.IsValid", "model.config.is_valid.message_export.global_relay.config_missing.app_error", nil, "", http.StatusBadRequest)
			} else if s.GlobalRelaySettings.CustomerType == nil || (*s.GlobalRelaySettings.CustomerType != GlobalrelayCustomerTypeA9 && *s.GlobalRelaySettings.CustomerType != GlobalrelayCustomerTypeA10 && *s.GlobalRelaySettings.CustomerType != GlobalrelayCustomerTypeCustom) {
				return NewAppError("Config.IsValid", "model.config.is_valid.message_export.global_relay.customer_type.app_error", nil, "", http.StatusBadRequest)
			} else if *s.GlobalRelaySettings.CustomerType == GlobalrelayCustomerTypeCustom && ((s.GlobalRelaySettings.CustomSMTPServerName == nil || *s.GlobalRelaySettings.CustomSMTPServerName == "") || (s.GlobalRelaySettings.CustomSMTPPort == nil || *s.GlobalRelaySettings.CustomSMTPPort == "")) {
				// The "custom" customer type must supply its own SMTP server and port.
				return NewAppError("Config.IsValid", "model.config.is_valid.message_export.global_relay.customer_type_custom.app_error", nil, "", http.StatusBadRequest)
			} else if s.GlobalRelaySettings.EmailAddress == nil || !strings.Contains(*s.GlobalRelaySettings.EmailAddress, "@") {
				// validating email addresses is hard - just make sure it contains an '@' sign
				// see https://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address
				return NewAppError("Config.IsValid", "model.config.is_valid.message_export.global_relay.email_address.app_error", nil, "", http.StatusBadRequest)
			} else if s.GlobalRelaySettings.SMTPUsername == nil || *s.GlobalRelaySettings.SMTPUsername == "" {
				return NewAppError("Config.IsValid", "model.config.is_valid.message_export.global_relay.smtp_username.app_error", nil, "", http.StatusBadRequest)
			} else if s.GlobalRelaySettings.SMTPPassword == nil || *s.GlobalRelaySettings.SMTPPassword == "" {
				return NewAppError("Config.IsValid", "model.config.is_valid.message_export.global_relay.smtp_password.app_error", nil, "", http.StatusBadRequest)
			}
		}
	}
	return nil
}
regexp.MustCompile(`(?i)^\s*[A-Za-z][A-Za-z0-9.+-]*\s*$`) + + for _, scheme := range s.CustomURLSchemes { + if !validProtocolPattern.MatchString(scheme) { + return NewAppError( + "Config.IsValid", + "model.config.is_valid.display.custom_url_schemes.app_error", + map[string]any{"Scheme": scheme}, + "", + http.StatusBadRequest, + ) + } + } + } + + return nil +} + +func (s *ImageProxySettings) isValid() *AppError { + if *s.Enable { + switch *s.ImageProxyType { + case ImageProxyTypeLocal: + // No other settings to validate + case ImageProxyTypeAtmosCamo: + if *s.RemoteImageProxyURL == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.atmos_camo_image_proxy_url.app_error", nil, "", http.StatusBadRequest) + } + + if *s.RemoteImageProxyOptions == "" { + return NewAppError("Config.IsValid", "model.config.is_valid.atmos_camo_image_proxy_options.app_error", nil, "", http.StatusBadRequest) + } + default: + return NewAppError("Config.IsValid", "model.config.is_valid.image_proxy_type.app_error", nil, "", http.StatusBadRequest) + } + } + + return nil +} + +// SanitizeOptions specifies options for the [Config.Sanitize] method. +type SanitizeOptions struct { + // PartiallyRedactDataSources, when true, only redacts usernames and passwords + // from data sources, keeping other connection parameters visible. + // When false, replaces the entire data source with FakeSetting. + PartiallyRedactDataSources bool +} + +func (o *Config) GetSanitizeOptions() map[string]bool { + options := map[string]bool{} + options["fullname"] = *o.PrivacySettings.ShowFullName + options["email"] = *o.PrivacySettings.ShowEmailAddress + + return options +} + +// Sanitize removes sensitive information from the configuration object. +// It replaces sensitive fields with [FakeSetting] or sanitizes them. +// +// Parameters: +// - pluginManifests: Plugin manifests for sanitizing plugin settings. +// - opts: Options for controlling sanitization behavior. If nil, defaults are used. 
// Sanitize removes sensitive information from the configuration object.
// It replaces sensitive fields with [FakeSetting] or sanitizes them.
//
// Parameters:
//   - pluginManifests: Plugin manifests for sanitizing plugin settings.
//   - opts: Options for controlling sanitization behavior. If nil, defaults
//     are used. See [SanitizeOptions].
func (o *Config) Sanitize(pluginManifests []*Manifest, opts *SanitizeOptions) {
	if opts == nil {
		opts = &SanitizeOptions{}
	}

	var driverName string
	if o.SqlSettings.DriverName != nil {
		driverName = *o.SqlSettings.DriverName
	}
	// sanitizeDataSourceField partially redacts a DSN (masking only the
	// credentials) when requested and the driver is known; on any failure, or
	// when partial redaction is not requested, it fully masks the value.
	sanitizeDataSourceField := func(dataSource string, fieldName string) string {
		if opts.PartiallyRedactDataSources && driverName != "" {
			sanitized, err := SanitizeDataSource(driverName, dataSource)
			if err != nil {
				mlog.Warn("Failed to sanitize "+fieldName+". Falling back to fully sanitizing the setting.", mlog.Err(err))
				return FakeSetting
			}
			return sanitized
		}
		return FakeSetting
	}
	// Secrets and credentials: fully masked with FakeSetting. Fields guarded
	// with an extra != "" check are left untouched when empty so an unset
	// secret stays visibly unset.
	if o.LdapSettings.BindPassword != nil && *o.LdapSettings.BindPassword != "" {
		*o.LdapSettings.BindPassword = FakeSetting
	}

	if o.FileSettings.PublicLinkSalt != nil {
		*o.FileSettings.PublicLinkSalt = FakeSetting
	}

	if o.FileSettings.AmazonS3SecretAccessKey != nil && *o.FileSettings.AmazonS3SecretAccessKey != "" {
		*o.FileSettings.AmazonS3SecretAccessKey = FakeSetting
	}

	if o.EmailSettings.SMTPPassword != nil && *o.EmailSettings.SMTPPassword != "" {
		*o.EmailSettings.SMTPPassword = FakeSetting
	}

	if o.GitLabSettings.Secret != nil && *o.GitLabSettings.Secret != "" {
		*o.GitLabSettings.Secret = FakeSetting
	}

	if o.GoogleSettings.Secret != nil && *o.GoogleSettings.Secret != "" {
		*o.GoogleSettings.Secret = FakeSetting
	}

	if o.Office365Settings.Secret != nil && *o.Office365Settings.Secret != "" {
		*o.Office365Settings.Secret = FakeSetting
	}

	if o.OpenIdSettings.Secret != nil && *o.OpenIdSettings.Secret != "" {
		*o.OpenIdSettings.Secret = FakeSetting
	}

	// Database DSNs: may be partially redacted depending on opts.
	if o.SqlSettings.DataSource != nil {
		*o.SqlSettings.DataSource = sanitizeDataSourceField(*o.SqlSettings.DataSource, "SqlSettings.DataSource")
	}

	if o.SqlSettings.AtRestEncryptKey != nil {
		*o.SqlSettings.AtRestEncryptKey = FakeSetting
	}

	if o.ElasticsearchSettings.Password != nil {
		*o.ElasticsearchSettings.Password = FakeSetting
	}

	for i := range o.SqlSettings.DataSourceReplicas {
		o.SqlSettings.DataSourceReplicas[i] = sanitizeDataSourceField(o.SqlSettings.DataSourceReplicas[i], "SqlSettings.DataSourceReplicas")
	}

	for i := range o.SqlSettings.DataSourceSearchReplicas {
		o.SqlSettings.DataSourceSearchReplicas[i] = sanitizeDataSourceField(o.SqlSettings.DataSourceSearchReplicas[i], "SqlSettings.DataSourceSearchReplicas")
	}

	for i := range o.SqlSettings.ReplicaLagSettings {
		if o.SqlSettings.ReplicaLagSettings[i].DataSource != nil {
			sanitized := sanitizeDataSourceField(*o.SqlSettings.ReplicaLagSettings[i].DataSource, "SqlSettings.ReplicaLagSettings")
			o.SqlSettings.ReplicaLagSettings[i].DataSource = NewPointer(sanitized)
		}
	}

	if o.MessageExportSettings.GlobalRelaySettings != nil &&
		o.MessageExportSettings.GlobalRelaySettings.SMTPPassword != nil &&
		*o.MessageExportSettings.GlobalRelaySettings.SMTPPassword != "" {
		*o.MessageExportSettings.GlobalRelaySettings.SMTPPassword = FakeSetting
	}

	if o.ServiceSettings.SplitKey != nil {
		*o.ServiceSettings.SplitKey = FakeSetting
	}

	if o.CacheSettings.RedisPassword != nil {
		*o.CacheSettings.RedisPassword = FakeSetting
	}

	// Plugin settings have their own sanitization, driven by the manifests.
	o.PluginSettings.Sanitize(pluginManifests)
}
+// +// Parameters: +// - driverName: The database driver name (postgres or mysql) +// - dataSource: The database connection string to sanitize +// +// Returns: +// - The sanitized connection string with username/password replaced by SanitizedPassword +// - An error if the driverName is not supported or if parsing fails +// +// Examples: +// - PostgreSQL: "postgres://user:pass@host:5432/db" -> "postgres://****:****@host:5432/db" +// - MySQL: "user:pass@tcp(host:3306)/db" -> "****:****@tcp(host:3306)/db" +func SanitizeDataSource(driverName, dataSource string) (string, error) { + // Handle empty data source + if dataSource == "" { + return "", nil + } + + switch driverName { + case DatabaseDriverPostgres: + u, err := url.Parse(dataSource) + if err != nil { + return "", err + } + u.User = url.UserPassword(SanitizedPassword, SanitizedPassword) + + // Remove username and password from query string + params := u.Query() + params.Del("user") + params.Del("password") + u.RawQuery = params.Encode() + + // Unescape the URL to make it human-readable + out, err := url.QueryUnescape(u.String()) + if err != nil { + return "", err + } + return out, nil + default: + return "", errors.New("invalid drivername. Not postgres or mysql.") + } +} + +type FilterTag struct { + TagType string + TagName string +} + +type ConfigFilterOptions struct { + GetConfigOptions + TagFilters []FilterTag +} + +type GetConfigOptions struct { + RemoveMasked bool + RemoveDefaults bool +} + +// FilterConfig returns a map[string]any representation of the configuration. +// Also, the function can filter the configuration by the options passed +// in the argument. The options are used to remove the default values, the masked +// values and to filter the configuration by the tags passed in the TagFilters. 
+func FilterConfig(cfg *Config, opts ConfigFilterOptions) (map[string]any, error) { + if cfg == nil { + return nil, nil + } + + defaultCfg := &Config{} + defaultCfg.SetDefaults() + + filteredCfg, err := cfg.StringMap() + if err != nil { + return nil, err + } + + filteredDefaultCfg, err := defaultCfg.StringMap() + if err != nil { + return nil, err + } + + for i := range opts.TagFilters { + filteredCfg = configToMapFilteredByTag(filteredCfg, opts.TagFilters[i].TagType, opts.TagFilters[i].TagName) + filteredDefaultCfg = configToMapFilteredByTag(filteredDefaultCfg, opts.TagFilters[i].TagType, opts.TagFilters[i].TagName) + } + + if opts.RemoveDefaults { + filteredCfg = stringMapDiff(filteredCfg, filteredDefaultCfg) + } + + if opts.RemoveMasked { + removeFakeSettings(filteredCfg) + } + + // only apply this if we applied some filters + // the alternative is to remove empty maps and slices during the filters + // but having this in a separate step makes it easier to understand + if opts.RemoveDefaults || opts.RemoveMasked || len(opts.TagFilters) > 0 { + removeEmptyMapsAndSlices(filteredCfg) + } + + return filteredCfg, nil +} + +// configToMapFilteredByTag converts a struct into a map removing those fields that has the tag passed +// as argument +// t shall be either a Config struct value or a map[string]any +func configToMapFilteredByTag(t any, typeOfTag, filterTag string) map[string]any { + switch t.(type) { + case map[string]any: + var tc *Config + b, err := json.Marshal(t) + if err != nil { + // since this is an internal function, we can panic here + // because it should never happen + panic(err) + } + json.Unmarshal(b, &tc) + t = *tc + } + + return structToMapFilteredByTag(t, typeOfTag, filterTag) +} + +func structToMapFilteredByTag(t any, typeOfTag, filterTag string) map[string]any { + defer func() { + if r := recover(); r != nil { + mlog.Warn("Panicked in structToMapFilteredByTag. 
// removeEmptyMapsAndSlices prunes m in place: nil values, empty maps, and
// empty slices are deleted. Nested maps are pruned first, so a map that
// becomes empty after pruning is itself removed from its parent.
func removeEmptyMapsAndSlices(m map[string]any) {
	for key := range m {
		switch value := m[key].(type) {
		case nil:
			delete(m, key)
		case map[string]any:
			removeEmptyMapsAndSlices(value)
			if len(value) == 0 {
				delete(m, key)
			}
		case []any:
			if len(value) == 0 {
				delete(m, key)
			}
		}
	}
}
m2[k]) { + continue + } + + switch v.(type) { + case map[string]any: + // this happens during the serialization of the struct to map + // so we can safely assume that the type is not matching, there + // is a difference in the values + casted, ok := m2[k].(map[string]any) + if !ok { + result[k] = v + continue + } + res := stringMapDiff(v.(map[string]any), casted) + if len(res) > 0 { + result[k] = res + } + default: + result[k] = v + } + } + + return result +} + +// removeFakeSettings removes all the fields that have the value of FakeSetting +// it's necessary to remove the fields that have been masked to be able to +// export the configuration (and make it importable) +func removeFakeSettings(m map[string]any) { + for k, v := range m { + switch vt := v.(type) { + case map[string]any: + removeFakeSettings(vt) + case string: + if v == FakeSetting { + delete(m, k) + } + } + } +} + +func isTagPresent(tag string, tags []string) bool { + for _, val := range tags { + tagValue := strings.TrimSpace(val) + if tagValue != "" && tagValue == tag { + return true + } + } + + return false +} + +// Copied from https://golang.org/src/net/dnsclient.go#L119 +func isDomainName(s string) bool { + // See RFC 1035, RFC 3696. + // Presentation format has dots before every label except the first, and the + // terminal empty label is optional here because we assume fully-qualified + // (absolute) input. We must therefore reserve space for the first and last + // labels' length octets in wire format, where they are necessary and the + // maximum total length is 255. + // So our _effective_ maximum is 253, but 254 is not rejected if the last + // character is a dot. + l := len(s) + if l == 0 || l > 254 || l == 254 && s[l-1] != '.' { + return false + } + + last := byte('.') + ok := false // Ok once we've seen a letter. 
// isDomainName reports whether s is a syntactically valid DNS domain name in
// presentation format, per RFC 1035 / RFC 3696. A single trailing dot
// (fully-qualified form) is accepted, labels are limited to 63 characters,
// and underscores are treated like letters.
//
// Copied from https://golang.org/src/net/dnsclient.go#L119
func isDomainName(s string) bool {
	// Wire format limits an encoded name to 255 bytes, which leaves an
	// effective maximum of 253 presentation characters — or 254 when the
	// name carries the optional terminal dot.
	total := len(s)
	if total == 0 || total > 254 || (total == 254 && s[total-1] != '.') {
		return false
	}

	prev := byte('.')
	sawLetter := false // a purely numeric name is not a domain name
	labelLen := 0
	for i := 0; i < total; i++ {
		ch := s[i]
		switch {
		case ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') || ch == '_':
			sawLetter = true
			labelLen++
		case '0' <= ch && ch <= '9':
			labelLen++
		case ch == '-':
			// A dash may not begin a label.
			if prev == '.' {
				return false
			}
			labelLen++
		case ch == '.':
			// A dot may not follow a dot or a dash, and each label must be
			// between 1 and 63 characters long.
			if prev == '.' || prev == '-' {
				return false
			}
			if labelLen > 63 || labelLen == 0 {
				return false
			}
			labelLen = 0
		default:
			return false
		}
		prev = ch
	}

	// The name may not end in a dash; the final (undotted) label still needs
	// its length check since the loop only checks dot-terminated labels.
	if prev == '-' || labelLen > 63 {
		return false
	}

	return sawLetter
}
+ +package model + +import ( + "net/http" + "slices" + "unicode/utf8" +) + +const ( + ContentFlaggingGroupName = "content_flagging" + ContentFlaggingPostType = PostCustomTypePrefix + "spillage_report" + ContentFlaggingBotUsername = "content-review" + + commentMaxRunes = 1000 + + AsContentReviewerParam = "as_content_reviewer" +) + +const ( + ContentFlaggingStatusPending = "Pending" + ContentFlaggingStatusAssigned = "Assigned" + ContentFlaggingStatusRemoved = "Removed" + ContentFlaggingStatusRetained = "Retained" +) + +type FlagContentRequest struct { + Reason string `json:"reason"` + Comment string `json:"comment,omitempty"` +} + +func (f *FlagContentRequest) IsValid(commentRequired bool, validReasons []string) *AppError { + if f.Reason == "" { + return NewAppError("FlagContentRequest.IsValid", "api.content_flagging.error.reason_required", nil, "", http.StatusBadRequest) + } + + if !slices.Contains(validReasons, f.Reason) { + return NewAppError("FlagContentRequest.IsValid", "api.content_flagging.error.reason_invalid", nil, "", http.StatusBadRequest) + } + + if commentRequired && f.Comment == "" { + return NewAppError("FlagContentRequest.IsValid", "api.content_flagging.error.comment_required", nil, "", http.StatusBadRequest) + } + + if utf8.RuneCountInString(f.Comment) > commentMaxRunes { + return NewAppError("FlagContentRequest.IsValid", "api.content_flagging.error.comment_too_long", map[string]any{"MaxLength": commentMaxRunes}, "", http.StatusBadRequest) + } + + return nil +} + +type FlagContentActionRequest struct { + Comment string `json:"comment,omitempty"` +} + +func (f *FlagContentActionRequest) IsValid(commentRequired bool) *AppError { + if commentRequired && f.Comment == "" { + return NewAppError("FlagContentActionRequest.IsValid", "api.content_flagging.error.comment_required", nil, "", http.StatusBadRequest) + } + + if utf8.RuneCountInString(f.Comment) > commentMaxRunes { + return NewAppError("FlagContentActionRequest.IsValid", 
// ReviewSettingsRequest bundles the reviewer configuration received from the
// API: the boolean reviewer toggles plus the concrete reviewer ID lists.
type ReviewSettingsRequest struct {
	ReviewerSettings
	ReviewerIDsSettings
}

// SetDefaults applies defaults to both embedded settings groups.
func (rs *ReviewSettingsRequest) SetDefaults() {
	rs.ReviewerSettings.SetDefaults()
	rs.ReviewerIDsSettings.SetDefaults()
}

// IsValid checks that, however reviewing is configured, at least one reviewer
// can actually be resolved for flagged content.
//
// NOTE(review): the boolean pointers are dereferenced without nil checks, so
// this assumes SetDefaults has already run — TODO confirm all callers do so.
func (rs *ReviewSettingsRequest) IsValid() *AppError {
	additionalReviewersEnabled := *rs.SystemAdminsAsReviewers || *rs.TeamAdminsAsReviewers

	// If common reviewers are enabled, there must be at least one specified
	// reviewer, or additional reviewers (admins) must be enabled.
	if *rs.CommonReviewers && len(rs.CommonReviewerIds) == 0 && !additionalReviewersEnabled {
		return NewAppError("Config.IsValid", "model.config.is_valid.content_flagging.common_reviewers_not_set.app_error", nil, "", http.StatusBadRequest)
	}

	// if Additional Reviewers are specified, no extra validation is needed in
	// team specific settings: setting team reviewers while keeping the team
	// feature disabled is valid, and enabling the team feature without
	// specifying reviewers is fine as well (the additional reviewers cover it).
	if !additionalReviewersEnabled {
		for _, setting := range rs.TeamReviewersSetting {
			if *setting.Enabled && len(setting.ReviewerIds) == 0 {
				return NewAppError("Config.IsValid", "model.config.is_valid.content_flagging.team_reviewers_not_set.app_error", nil, "", http.StatusBadRequest)
			}
		}
	}

	return nil
}
ReviewerSettings *ReviewSettingsRequest +} + +func (cfs *ContentFlaggingSettingsRequest) SetDefaults() { + cfs.ContentFlaggingSettingsBase.SetDefaults() + + if cfs.EnableContentFlagging == nil { + cfs.EnableContentFlagging = NewPointer(false) + } + + if cfs.NotificationSettings == nil { + cfs.NotificationSettings = &ContentFlaggingNotificationSettings{ + EventTargetMapping: make(map[ContentFlaggingEvent][]NotificationTarget), + } + } + + if cfs.ReviewerSettings == nil { + cfs.ReviewerSettings = &ReviewSettingsRequest{} + } + + if cfs.AdditionalSettings == nil { + cfs.AdditionalSettings = &AdditionalContentFlaggingSettings{} + } + + cfs.NotificationSettings.SetDefaults() + cfs.ReviewerSettings.SetDefaults() + cfs.AdditionalSettings.SetDefaults() +} + +func (cfs *ContentFlaggingSettingsRequest) IsValid() *AppError { + if appErr := cfs.ContentFlaggingSettingsBase.IsValid(); appErr != nil { + return appErr + } + + if appErr := cfs.ReviewerSettings.IsValid(); appErr != nil { + return appErr + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/content_flagging_settings.go b/vendor/github.com/mattermost/mattermost/server/public/model/content_flagging_settings.go new file mode 100644 index 00000000..c26b32e4 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/content_flagging_settings.go @@ -0,0 +1,233 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" + "slices" +) + +type ContentFlaggingEvent string + +const ( + EventFlagged ContentFlaggingEvent = "flagged" + EventAssigned ContentFlaggingEvent = "assigned" + EventContentRemoved ContentFlaggingEvent = "removed" + EventContentDismissed ContentFlaggingEvent = "dismissed" +) + +type NotificationTarget string + +const ( + TargetReviewers NotificationTarget = "reviewers" + TargetAuthor NotificationTarget = "author" + TargetReporter NotificationTarget = "reporter" +) + +var ContentFlaggingDefaultReasons = []string{ + "Inappropriate content", + "Sensitive data", + "Security concern", + "Harassment or abuse", + "Spam or phishing", +} + +type ContentFlaggingNotificationSettings struct { + EventTargetMapping map[ContentFlaggingEvent][]NotificationTarget +} + +func (cfs *ContentFlaggingNotificationSettings) SetDefaults() { + if cfs.EventTargetMapping == nil { + cfs.EventTargetMapping = make(map[ContentFlaggingEvent][]NotificationTarget) + } + + if _, exists := cfs.EventTargetMapping[EventFlagged]; !exists { + cfs.EventTargetMapping[EventFlagged] = []NotificationTarget{TargetReviewers} + } else { + // Ensure TargetReviewers is always included for EventFlagged + if !slices.Contains(cfs.EventTargetMapping[EventFlagged], TargetReviewers) { + cfs.EventTargetMapping[EventFlagged] = append(cfs.EventTargetMapping[EventFlagged], TargetReviewers) + } + } + + if _, exists := cfs.EventTargetMapping[EventAssigned]; !exists { + cfs.EventTargetMapping[EventAssigned] = []NotificationTarget{TargetReviewers} + } + + if _, exists := cfs.EventTargetMapping[EventContentRemoved]; !exists { + cfs.EventTargetMapping[EventContentRemoved] = []NotificationTarget{TargetReviewers, TargetAuthor, TargetReporter} + } + + if _, exists := cfs.EventTargetMapping[EventContentDismissed]; !exists { + cfs.EventTargetMapping[EventContentDismissed] = []NotificationTarget{TargetReviewers, TargetReporter} + } +} + +func (cfs *ContentFlaggingNotificationSettings) 
IsValid() *AppError { + // Reviewers must be notified when content is flagged + // Disabling this option is not allowed in the UI, so this check is for safety and consistency. + + // Only valid events and targets are allowed + for event, targets := range cfs.EventTargetMapping { + if event != EventFlagged && event != EventAssigned && event != EventContentRemoved && event != EventContentDismissed { + return NewAppError("Config.IsValid", "model.config.is_valid.notification_settings.invalid_event", nil, "", http.StatusBadRequest) + } + + for _, target := range targets { + if target != TargetReviewers && target != TargetAuthor && target != TargetReporter { + return NewAppError("Config.IsValid", "model.config.is_valid.notification_settings.invalid_target", nil, fmt.Sprintf("target: %s", target), http.StatusBadRequest) + } + } + } + + if len(cfs.EventTargetMapping[EventFlagged]) == 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.notification_settings.reviewer_flagged_notification_disabled", nil, "", http.StatusBadRequest) + } + + // Search for the TargetReviewers in the EventFlagged event + reviewerFound := slices.Contains(cfs.EventTargetMapping[EventFlagged], TargetReviewers) + if !reviewerFound { + return NewAppError("Config.IsValid", "model.config.is_valid.notification_settings.reviewer_flagged_notification_disabled", nil, "", http.StatusBadRequest) + } + + return nil +} + +type TeamReviewerSetting struct { + Enabled *bool + ReviewerIds []string +} + +type ReviewerSettings struct { + CommonReviewers *bool + SystemAdminsAsReviewers *bool + TeamAdminsAsReviewers *bool +} + +func (rs *ReviewerSettings) SetDefaults() { + if rs.CommonReviewers == nil { + rs.CommonReviewers = NewPointer(true) + } + + if rs.SystemAdminsAsReviewers == nil { + rs.SystemAdminsAsReviewers = NewPointer(false) + } + + if rs.TeamAdminsAsReviewers == nil { + rs.TeamAdminsAsReviewers = NewPointer(true) + } +} + +type AdditionalContentFlaggingSettings struct { + Reasons *[]string + 
ReporterCommentRequired *bool + ReviewerCommentRequired *bool + HideFlaggedContent *bool +} + +func (acfs *AdditionalContentFlaggingSettings) SetDefaults() { + if acfs.Reasons == nil { + acfs.Reasons = &ContentFlaggingDefaultReasons + } + + if acfs.ReporterCommentRequired == nil { + acfs.ReporterCommentRequired = NewPointer(true) + } + + if acfs.ReviewerCommentRequired == nil { + acfs.ReviewerCommentRequired = NewPointer(true) + } + + if acfs.HideFlaggedContent == nil { + acfs.HideFlaggedContent = NewPointer(true) + } +} + +func (acfs *AdditionalContentFlaggingSettings) IsValid() *AppError { + if acfs.Reasons == nil || len(*acfs.Reasons) == 0 { + return NewAppError("Config.IsValid", "model.config.is_valid.content_flagging.reasons_not_set.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +type ContentFlaggingSettingsBase struct { + EnableContentFlagging *bool + NotificationSettings *ContentFlaggingNotificationSettings + AdditionalSettings *AdditionalContentFlaggingSettings +} + +func (cfs *ContentFlaggingSettingsBase) SetDefaults() { + if cfs.EnableContentFlagging == nil { + cfs.EnableContentFlagging = NewPointer(false) + } + + if cfs.NotificationSettings == nil { + cfs.NotificationSettings = &ContentFlaggingNotificationSettings{ + EventTargetMapping: make(map[ContentFlaggingEvent][]NotificationTarget), + } + } + + if cfs.AdditionalSettings == nil { + cfs.AdditionalSettings = &AdditionalContentFlaggingSettings{} + } + + cfs.NotificationSettings.SetDefaults() + cfs.AdditionalSettings.SetDefaults() +} + +func (cfs *ContentFlaggingSettingsBase) IsValid() *AppError { + if err := cfs.NotificationSettings.IsValid(); err != nil { + return err + } + + if err := cfs.AdditionalSettings.IsValid(); err != nil { + return err + } + + return nil +} + +type ContentFlaggingSettings struct { + ContentFlaggingSettingsBase + ReviewerSettings *ReviewerSettings +} + +func (cfs *ContentFlaggingSettings) SetDefaults() { + cfs.ContentFlaggingSettingsBase.SetDefaults() + + 
if cfs.ReviewerSettings == nil { + cfs.ReviewerSettings = &ReviewerSettings{} + } + + cfs.ReviewerSettings.SetDefaults() +} + +func (cfs *ContentFlaggingSettings) IsValid() *AppError { + return cfs.ContentFlaggingSettingsBase.IsValid() +} + +type ContentFlaggingReportingConfig struct { + Reasons *[]string `json:"reasons"` + ReporterCommentRequired *bool `json:"reporter_comment_required"` + ReviewerCommentRequired *bool `json:"reviewer_comment_required"` + NotifyReporterOnDismissal *bool `json:"notify_reporter_on_dismissal,omitempty"` + NotifyReporterOnRemoval *bool `json:"notify_reporter_on_removal,omitempty"` +} + +type ReviewerIDsSettings struct { + CommonReviewerIds []string + TeamReviewersSetting map[string]*TeamReviewerSetting +} + +func (rs *ReviewerIDsSettings) SetDefaults() { + if rs.CommonReviewerIds == nil { + rs.CommonReviewerIds = []string{} + } + + if rs.TeamReviewersSetting == nil { + rs.TeamReviewersSetting = map[string]*TeamReviewerSetting{} + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/custom_profile_attributes.go b/vendor/github.com/mattermost/mattermost/server/public/model/custom_profile_attributes.go new file mode 100644 index 00000000..e697275a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/custom_profile_attributes.go @@ -0,0 +1,339 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "encoding/json" + "errors" + "fmt" + "net/http" + "net/url" + "strings" +) + +const CustomProfileAttributesPropertyGroupName = "custom_profile_attributes" + +func CPASortOrder(p *PropertyField) int { + value, ok := p.Attrs[CustomProfileAttributesPropertyAttrsSortOrder] + if !ok { + return 0 + } + + sortOrder, ok := value.(float64) + if !ok { + return 0 + } + + return int(sortOrder) +} + +const ( + // Attributes keys + CustomProfileAttributesPropertyAttrsSortOrder = "sort_order" + CustomProfileAttributesPropertyAttrsValueType = "value_type" + CustomProfileAttributesPropertyAttrsVisibility = "visibility" + CustomProfileAttributesPropertyAttrsLDAP = "ldap" + CustomProfileAttributesPropertyAttrsSAML = "saml" + CustomProfileAttributesPropertyAttrsManaged = "managed" + + // Value Types + CustomProfileAttributesValueTypeEmail = "email" + CustomProfileAttributesValueTypeURL = "url" + CustomProfileAttributesValueTypePhone = "phone" + + // Visibility + CustomProfileAttributesVisibilityHidden = "hidden" + CustomProfileAttributesVisibilityWhenSet = "when_set" + CustomProfileAttributesVisibilityAlways = "always" + CustomProfileAttributesVisibilityDefault = CustomProfileAttributesVisibilityWhenSet + + // CPA options + CPAOptionNameMaxLength = 128 + CPAOptionColorMaxLength = 128 + + // CPA value constraints + CPAValueTypeTextMaxLength = 64 +) + +func IsKnownCPAValueType(valueType string) bool { + switch valueType { + case CustomProfileAttributesValueTypeEmail, + CustomProfileAttributesValueTypeURL, + CustomProfileAttributesValueTypePhone: + return true + } + + return false +} + +func IsKnownCPAVisibility(visibility string) bool { + switch visibility { + case CustomProfileAttributesVisibilityHidden, + CustomProfileAttributesVisibilityWhenSet, + CustomProfileAttributesVisibilityAlways: + return true + } + + return false +} + +type CustomProfileAttributesSelectOption struct { + ID string `json:"id"` + Name string `json:"name"` + Color string 
`json:"color"` +} + +func (c CustomProfileAttributesSelectOption) GetID() string { + return c.ID +} + +func (c CustomProfileAttributesSelectOption) GetName() string { + return c.Name +} + +func (c *CustomProfileAttributesSelectOption) SetID(id string) { + c.ID = id +} + +func (c CustomProfileAttributesSelectOption) IsValid() error { + if c.ID == "" { + return errors.New("id cannot be empty") + } + + if !IsValidId(c.ID) { + return errors.New("id is not a valid ID") + } + + if c.Name == "" { + return errors.New("name cannot be empty") + } + + if len(c.Name) > CPAOptionNameMaxLength { + return fmt.Errorf("name is too long, max length is %d", CPAOptionNameMaxLength) + } + + if c.Color != "" && len(c.Color) > CPAOptionColorMaxLength { + return fmt.Errorf("color is too long, max length is %d", CPAOptionColorMaxLength) + } + + return nil +} + +type CPAField struct { + PropertyField + Attrs CPAAttrs `json:"attrs"` +} + +type CPAAttrs struct { + Visibility string `json:"visibility"` + SortOrder float64 `json:"sort_order"` + Options PropertyOptions[*CustomProfileAttributesSelectOption] `json:"options"` + ValueType string `json:"value_type"` + LDAP string `json:"ldap"` + SAML string `json:"saml"` + Managed string `json:"managed"` +} + +func (c *CPAField) IsSynced() bool { + return c.Attrs.LDAP != "" || c.Attrs.SAML != "" +} + +func (c *CPAField) IsAdminManaged() bool { + return c.Attrs.Managed == "admin" +} + +func (c *CPAField) ToPropertyField() *PropertyField { + pf := c.PropertyField + + pf.Attrs = StringInterface{ + CustomProfileAttributesPropertyAttrsVisibility: c.Attrs.Visibility, + CustomProfileAttributesPropertyAttrsSortOrder: c.Attrs.SortOrder, + CustomProfileAttributesPropertyAttrsValueType: c.Attrs.ValueType, + PropertyFieldAttributeOptions: c.Attrs.Options, + CustomProfileAttributesPropertyAttrsLDAP: c.Attrs.LDAP, + CustomProfileAttributesPropertyAttrsSAML: c.Attrs.SAML, + CustomProfileAttributesPropertyAttrsManaged: c.Attrs.Managed, + } + + return &pf +} + +// 
SupportsOptions checks the CPAField type and determines if the type +// supports the use of options +func (c *CPAField) SupportsOptions() bool { + return c.Type == PropertyFieldTypeSelect || c.Type == PropertyFieldTypeMultiselect +} + +// SupportsSyncing checks the CPAField type and determines if it +// supports syncing with external sources of truth +func (c *CPAField) SupportsSyncing() bool { + return c.Type == PropertyFieldTypeText +} + +func (c *CPAField) SanitizeAndValidate() *AppError { + // first we clean unused attributes depending on the field type + if !c.SupportsOptions() { + c.Attrs.Options = nil + } + if !c.SupportsSyncing() { + c.Attrs.LDAP = "" + c.Attrs.SAML = "" + } + + // Clear sync properties if managed is set (mutual exclusivity) + if c.IsAdminManaged() { + c.Attrs.LDAP = "" + c.Attrs.SAML = "" + } + + switch c.Type { + case PropertyFieldTypeText: + if valueType := strings.TrimSpace(c.Attrs.ValueType); valueType != "" { + if !IsKnownCPAValueType(valueType) { + return NewAppError("SanitizeAndValidate", "app.custom_profile_attributes.sanitize_and_validate.app_error", map[string]any{ + "AttributeName": CustomProfileAttributesPropertyAttrsValueType, + "Reason": "unknown value type", + }, "", http.StatusUnprocessableEntity) + } + c.Attrs.ValueType = valueType + } + + case PropertyFieldTypeSelect, PropertyFieldTypeMultiselect: + options := c.Attrs.Options + + // add an ID to options with no ID + for i := range options { + if options[i].ID == "" { + options[i].ID = NewId() + } + } + + if err := options.IsValid(); err != nil { + return NewAppError("SanitizeAndValidate", "app.custom_profile_attributes.sanitize_and_validate.app_error", map[string]any{ + "AttributeName": PropertyFieldAttributeOptions, + "Reason": err.Error(), + }, "", http.StatusUnprocessableEntity).Wrap(err) + } + c.Attrs.Options = options + } + + visibility := CustomProfileAttributesVisibilityDefault + if visibilityAttr := strings.TrimSpace(c.Attrs.Visibility); visibilityAttr != "" { + 
if !IsKnownCPAVisibility(visibilityAttr) { + return NewAppError("SanitizeAndValidate", "app.custom_profile_attributes.sanitize_and_validate.app_error", map[string]any{ + "AttributeName": CustomProfileAttributesPropertyAttrsVisibility, + "Reason": "unknown visibility", + }, "", http.StatusUnprocessableEntity) + } + visibility = visibilityAttr + } + c.Attrs.Visibility = visibility + + // Validate managed field + if managed := strings.TrimSpace(c.Attrs.Managed); managed != "" { + if managed != "admin" { + return NewAppError("SanitizeAndValidate", "app.custom_profile_attributes.sanitize_and_validate.app_error", map[string]any{ + "AttributeName": CustomProfileAttributesPropertyAttrsManaged, + "Reason": "unknown managed type", + }, "", http.StatusBadRequest) + } + c.Attrs.Managed = managed + } + + return nil +} + +func NewCPAFieldFromPropertyField(pf *PropertyField) (*CPAField, error) { + attrsJSON, err := json.Marshal(pf.Attrs) + if err != nil { + return nil, err + } + + var attrs CPAAttrs + err = json.Unmarshal(attrsJSON, &attrs) + if err != nil { + return nil, err + } + + return &CPAField{ + PropertyField: *pf, + Attrs: attrs, + }, nil +} + +// SanitizeAndValidatePropertyValue validates and sanitizes the given +// property value based on the field type +func SanitizeAndValidatePropertyValue(cpaField *CPAField, rawValue json.RawMessage) (json.RawMessage, error) { + fieldType := cpaField.Type + + // build a list of existing options so we can check later if the values exist + optionsMap := map[string]struct{}{} + for _, v := range cpaField.Attrs.Options { + optionsMap[v.ID] = struct{}{} + } + + switch fieldType { + case PropertyFieldTypeText, PropertyFieldTypeDate, PropertyFieldTypeSelect, PropertyFieldTypeUser: + var value string + if err := json.Unmarshal(rawValue, &value); err != nil { + return nil, err + } + value = strings.TrimSpace(value) + + if fieldType == PropertyFieldTypeText { + if len(value) > CPAValueTypeTextMaxLength { + return nil, fmt.Errorf("value too 
long") + } + + if cpaField.Attrs.ValueType == CustomProfileAttributesValueTypeEmail && !IsValidEmail(value) { + return nil, fmt.Errorf("invalid email") + } + + if cpaField.Attrs.ValueType == CustomProfileAttributesValueTypeURL { + _, err := url.Parse(value) + if err != nil { + return nil, fmt.Errorf("invalid url: %w", err) + } + } + } + + if fieldType == PropertyFieldTypeSelect && value != "" { + if _, ok := optionsMap[value]; !ok { + return nil, fmt.Errorf("option \"%s\" does not exist", value) + } + } + + if fieldType == PropertyFieldTypeUser && value != "" && !IsValidId(value) { + return nil, fmt.Errorf("invalid user id") + } + return json.Marshal(value) + + case PropertyFieldTypeMultiselect, PropertyFieldTypeMultiuser: + var values []string + if err := json.Unmarshal(rawValue, &values); err != nil { + return nil, err + } + filteredValues := make([]string, 0, len(values)) + for _, v := range values { + trimmed := strings.TrimSpace(v) + if trimmed == "" { + continue + } + if fieldType == PropertyFieldTypeMultiselect { + if _, ok := optionsMap[v]; !ok { + return nil, fmt.Errorf("option \"%s\" does not exist", v) + } + } + + if fieldType == PropertyFieldTypeMultiuser && !IsValidId(trimmed) { + return nil, fmt.Errorf("invalid user id: %s", trimmed) + } + filteredValues = append(filteredValues, trimmed) + } + return json.Marshal(filteredValues) + + default: + return nil, fmt.Errorf("unknown field type: %s", fieldType) + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/custom_status.go b/vendor/github.com/mattermost/mattermost/server/public/model/custom_status.go new file mode 100644 index 00000000..dcd9aba8 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/custom_status.go @@ -0,0 +1,136 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "bytes" + "encoding/json" + "fmt" + "time" +) + +const ( + UserPropsKeyCustomStatus = "customStatus" + + CustomStatusTextMaxRunes = 100 + MaxRecentCustomStatuses = 5 + DefaultCustomStatusEmoji = "speech_balloon" +) + +var validCustomStatusDuration = map[string]bool{ + "thirty_minutes": true, + "one_hour": true, + "four_hours": true, + "today": true, + "this_week": true, + "date_and_time": true, +} + +type CustomStatus struct { + Emoji string `json:"emoji"` + Text string `json:"text"` + Duration string `json:"duration"` + ExpiresAt time.Time `json:"expires_at"` +} + +func (cs *CustomStatus) PreSave() { + if cs.Duration == "" && !cs.ExpiresAt.Before(time.Now()) { + cs.Duration = "date_and_time" + } + + runes := []rune(cs.Text) + if len(runes) > CustomStatusTextMaxRunes { + cs.Text = string(runes[:CustomStatusTextMaxRunes]) + } +} + +func (cs *CustomStatus) AreDurationAndExpirationTimeValid() bool { + if cs.Duration == "" && (cs.ExpiresAt.IsZero() || !cs.ExpiresAt.Before(time.Now())) { + return true + } + + if validCustomStatusDuration[cs.Duration] && !cs.ExpiresAt.Before(time.Now()) { + return true + } + + return false +} + +func RuneToHexadecimalString(r rune) string { + return fmt.Sprintf("%04x", r) +} + +type RecentCustomStatuses []CustomStatus + +func (rcs RecentCustomStatuses) Contains(cs *CustomStatus) (bool, error) { + if cs == nil { + return false, nil + } + + csJSON, jsonErr := json.Marshal(cs) + if jsonErr != nil { + return false, jsonErr + } + + // status is empty + if len(csJSON) == 0 || (cs.Emoji == "" && cs.Text == "") { + return false, nil + } + + for _, status := range rcs { + js, jsonErr := json.Marshal(status) + if jsonErr != nil { + return false, jsonErr + } + if bytes.Equal(js, csJSON) { + return true, nil + } + } + + return false, nil +} + +func (rcs RecentCustomStatuses) Add(cs *CustomStatus) RecentCustomStatuses { + newRCS := rcs[:0] + + // if same `text` exists in existing recent custom statuses, modify existing 
status + for _, status := range rcs { + if status.Text != cs.Text { + newRCS = append(newRCS, status) + } + } + newRCS = append(RecentCustomStatuses{*cs}, newRCS...) + if len(newRCS) > MaxRecentCustomStatuses { + newRCS = newRCS[:MaxRecentCustomStatuses] + } + return newRCS +} + +func (rcs RecentCustomStatuses) Remove(cs *CustomStatus) (RecentCustomStatuses, error) { + if cs == nil { + return rcs, nil + } + + csJSON, jsonErr := json.Marshal(cs) + if jsonErr != nil { + return rcs, jsonErr + } + + if len(csJSON) == 0 || (cs.Emoji == "" && cs.Text == "") { + return rcs, nil + } + + newRCS := rcs[:0] + for _, status := range rcs { + js, jsonErr := json.Marshal(status) + if jsonErr != nil { + return rcs, jsonErr + } + if !bytes.Equal(js, csJSON) { + newRCS = append(newRCS, status) + } + } + + return newRCS, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/data_retention_policy.go b/vendor/github.com/mattermost/mattermost/server/public/model/data_retention_policy.go new file mode 100644 index 00000000..7d9c8e5c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/data_retention_policy.go @@ -0,0 +1,105 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
package model

// GlobalRetentionPolicy describes the server-wide retention settings and the
// computed cutoff timestamps for messages and files.
type GlobalRetentionPolicy struct {
	MessageDeletionEnabled bool  `json:"message_deletion_enabled"`
	FileDeletionEnabled    bool  `json:"file_deletion_enabled"`
	MessageRetentionCutoff int64 `json:"message_retention_cutoff"`
	FileRetentionCutoff    int64 `json:"file_retention_cutoff"`
}

// RetentionPolicy is a granular (non-global) retention policy.
// A nil PostDurationDays means posts are kept forever under this policy.
type RetentionPolicy struct {
	ID               string `db:"Id" json:"id"`
	DisplayName      string `json:"display_name"`
	PostDurationDays *int64 `db:"PostDuration" json:"post_duration"`
}

// RetentionPolicyWithTeamAndChannelIDs pairs a policy with the IDs of the
// teams and channels it applies to.
type RetentionPolicyWithTeamAndChannelIDs struct {
	RetentionPolicy
	TeamIDs    []string `json:"team_ids"`
	ChannelIDs []string `json:"channel_ids"`
}

// Auditable returns the policy as a map for audit logging.
func (o *RetentionPolicyWithTeamAndChannelIDs) Auditable() map[string]any {
	return map[string]any{
		"retention_policy": o.RetentionPolicy,
		"team_ids":         o.TeamIDs,
		"channel_ids":      o.ChannelIDs,
	}
}

// RetentionPolicyWithTeamAndChannelCounts pairs a policy with the number of
// teams and channels it applies to.
type RetentionPolicyWithTeamAndChannelCounts struct {
	RetentionPolicy
	ChannelCount int64 `json:"channel_count"`
	TeamCount    int64 `json:"team_count"`
}

// Auditable returns the policy as a map for audit logging.
func (o *RetentionPolicyWithTeamAndChannelCounts) Auditable() map[string]any {
	return map[string]any{
		"retention_policy": o.RetentionPolicy,
		"channel_count":    o.ChannelCount,
		"team_count":       o.TeamCount,
	}
}

// RetentionPolicyChannel links a policy to a channel (join-table row).
type RetentionPolicyChannel struct {
	PolicyID  string `db:"PolicyId"`
	ChannelID string `db:"ChannelId"`
}

// RetentionPolicyTeam links a policy to a team (join-table row).
type RetentionPolicyTeam struct {
	PolicyID string `db:"PolicyId"`
	TeamID   string `db:"TeamId"`
}

// RetentionPolicyWithTeamAndChannelCountsList is a paginated list of policies
// with counts.
type RetentionPolicyWithTeamAndChannelCountsList struct {
	Policies   []*RetentionPolicyWithTeamAndChannelCounts `json:"policies"`
	TotalCount int64                                      `json:"total_count"`
}

// RetentionPolicyForTeam is the effective policy applied to one team.
type RetentionPolicyForTeam struct {
	TeamID           string `db:"Id" json:"team_id"`
	PostDurationDays int64  `db:"PostDuration" json:"post_duration"`
}

// RetentionPolicyForTeamList is a paginated list of per-team policies.
type RetentionPolicyForTeamList struct {
	Policies   []*RetentionPolicyForTeam `json:"policies"`
	TotalCount int64                     `json:"total_count"`
}

// RetentionPolicyForChannel is the effective policy applied to one channel.
type RetentionPolicyForChannel struct {
	ChannelID        string `db:"Id" json:"channel_id"`
	PostDurationDays int64  `db:"PostDuration" json:"post_duration"`
}

// RetentionPolicyForChannelList is a paginated list of per-channel policies.
type RetentionPolicyForChannelList struct {
	Policies   []*RetentionPolicyForChannel `json:"policies"`
	TotalCount int64                        `json:"total_count"`
}

// RetentionPolicyCursor tracks progress through the three policy classes
// during a deletion sweep.
type RetentionPolicyCursor struct {
	ChannelPoliciesDone bool
	TeamPoliciesDone    bool
	GlobalPoliciesDone  bool
}

// RetentionIdsForDeletion is a persisted batch of row IDs scheduled for
// deletion from a given table.
type RetentionIdsForDeletion struct {
	Id        string
	TableName string
	Ids       []string
}

// RetentionPolicyBatchConfigs carries the parameters of one deletion batch.
type RetentionPolicyBatchConfigs struct {
	Now                 int64
	GlobalPolicyEndTime int64
	Limit               int64
	PreservePinnedPosts bool
}

// PreSave assigns a fresh ID when none is set yet.
func (r *RetentionIdsForDeletion) PreSave() {
	if r.Id == "" {
		r.Id = NewId()
	}
}

// --- patch boundary: next file in this diff ---
// diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/draft.go b/vendor/github.com/mattermost/mattermost/server/public/model/draft.go
// new file mode 100644
// index 00000000..eb5640ab
// --- /dev/null
// +++ b/vendor/github.com/mattermost/mattermost/server/public/model/draft.go
// @@ -0,0 +1,109 @@

// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

package model

import (
	"net/http"
	"sync"
	"unicode/utf8"
)

// Draft is an unsent message a user is composing in a channel or thread.
type Draft struct {
	CreateAt int64  `json:"create_at"`
	UpdateAt int64  `json:"update_at"`
	DeleteAt int64  `json:"delete_at"` // Deprecated, we now just hard delete the rows
	UserId    string `json:"user_id"`
	ChannelId string `json:"channel_id"`
	RootId    string `json:"root_id"`

	Message string `json:"message"`

	propsMu  sync.RWMutex    `db:"-"`                  // Unexported mutex used to guard Draft.Props.
	Props    StringInterface `json:"props"`            // Deprecated: use GetProps()
	FileIds  StringArray     `json:"file_ids,omitempty"`
	Metadata *PostMetadata   `json:"metadata,omitempty"`
	Priority StringInterface `json:"priority,omitempty"`
}

// IsValid checks the message length against maxDraftSize (in runes) and then
// runs the base validation.
func (o *Draft) IsValid(maxDraftSize int) *AppError {
	if utf8.RuneCountInString(o.Message) > maxDraftSize {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.message_length.app_error",
			map[string]any{"Length": utf8.RuneCountInString(o.Message), "MaxLength": maxDraftSize}, "channelid="+o.ChannelId, http.StatusBadRequest)
	}

	return o.BaseIsValid()
}

// BaseIsValid validates timestamps, IDs, and the serialized sizes of file
// IDs, props and priority.
func (o *Draft) BaseIsValid() *AppError {
	if o.CreateAt == 0 {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.create_at.app_error", nil, "channelid="+o.ChannelId, http.StatusBadRequest)
	}

	if o.UpdateAt == 0 {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.update_at.app_error", nil, "channelid="+o.ChannelId, http.StatusBadRequest)
	}

	if !IsValidId(o.UserId) {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.user_id.app_error", nil, "", http.StatusBadRequest)
	}

	if !IsValidId(o.ChannelId) {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.channel_id.app_error", nil, "", http.StatusBadRequest)
	}

	// RootId is optional (empty for channel drafts) but must be valid when set.
	if !(IsValidId(o.RootId) || o.RootId == "") {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.root_id.app_error", nil, "", http.StatusBadRequest)
	}

	if utf8.RuneCountInString(ArrayToJSON(o.FileIds)) > PostFileidsMaxRunes {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.file_ids.app_error", nil, "channelid="+o.ChannelId, http.StatusBadRequest)
	}

	if utf8.RuneCountInString(StringInterfaceToJSON(o.GetProps())) > PostPropsMaxRunes {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.props.app_error", nil, "channelid="+o.ChannelId, http.StatusBadRequest)
	}

	if utf8.RuneCountInString(StringInterfaceToJSON(o.Priority)) > PostPropsMaxRunes {
		return NewAppError("Drafts.IsValid", "model.draft.is_valid.priority.app_error", nil, "channelid="+o.ChannelId, http.StatusBadRequest)
	}

	return nil
}

// SetProps replaces the props map under the props mutex.
func (o *Draft) SetProps(props StringInterface) {
	o.propsMu.Lock()
	defer o.propsMu.Unlock()
	o.Props = props
}

// GetProps returns the props map under the props mutex.
func (o *Draft) GetProps() StringInterface {
	o.propsMu.RLock()
	defer o.propsMu.RUnlock()
	return o.Props
}

// PreSave stamps creation/update times, clears the deprecated DeleteAt, and
// normalizes the draft for storage.
func (o *Draft) PreSave() {
	if o.CreateAt == 0 {
		o.CreateAt = GetMillis()
		o.UpdateAt = o.CreateAt
	} else {
		o.UpdateAt = GetMillis()
	}

	o.DeleteAt = 0
	o.PreCommit()
}

// PreCommit ensures props and file IDs are non-nil and deduplicated.
func (o *Draft) PreCommit() {
	if o.GetProps() == nil {
		o.SetProps(make(map[string]any))
	}

	if o.FileIds == nil {
		o.FileIds = []string{}
	}

	// There's a rare bug where the client sends up duplicate FileIds so protect against that
	o.FileIds = RemoveDuplicateStrings(o.FileIds)
}

// --- patch boundary: next file in this diff ---
// diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/email_notification.go b/vendor/github.com/mattermost/mattermost/server/public/model/email_notification.go
// new file mode 100644
// index 00000000..e9fc2cb6
// --- /dev/null
// +++ b/vendor/github.com/mattermost/mattermost/server/public/model/email_notification.go
// @@ -0,0 +1,37 @@

// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package model

// EmailNotificationContent carries the renderable pieces of a notification
// email (subject, titles, body in both HTML and text form, call-to-action
// button, and footer).
type EmailNotificationContent struct {
	Subject     string `json:"subject,omitempty"`
	Title       string `json:"title,omitempty"`
	SubTitle    string `json:"subtitle,omitempty"`
	MessageHTML string `json:"message_html,omitempty"`
	MessageText string `json:"message_text,omitempty"`
	ButtonText  string `json:"button_text,omitempty"`
	ButtonURL   string `json:"button_url,omitempty"`
	FooterText  string `json:"footer_text,omitempty"`
}

// EmailNotification describes a single post notification email: the entities
// involved (post, channel, team, sender, recipient), contextual flags about
// the kind of message, and the embedded renderable content.
type EmailNotification struct {
	PostId            string `json:"post_id"`
	ChannelId         string `json:"channel_id"`
	TeamId            string `json:"team_id"`
	SenderId          string `json:"sender_id"`
	SenderDisplayName string `json:"sender_display_name,omitempty"`
	RecipientId       string `json:"recipient_id"`
	RootId            string `json:"root_id,omitempty"`

	ChannelType     string `json:"channel_type"`
	ChannelName     string `json:"channel_name"`
	TeamName        string `json:"team_name"`
	SenderUsername  string `json:"sender_username"`
	IsDirectMessage bool   `json:"is_direct_message"`
	IsGroupMessage  bool   `json:"is_group_message"`
	IsThreadReply   bool   `json:"is_thread_reply"`
	IsCRTEnabled    bool   `json:"is_crt_enabled"`
	UseMilitaryTime bool   `json:"use_military_time"`

	EmailNotificationContent
}

// --- patch boundary: next file in this diff ---
// diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/emoji.go b/vendor/github.com/mattermost/mattermost/server/public/model/emoji.go
// new file mode 100644
// index 00000000..bb659c50
// --- /dev/null
// +++ b/vendor/github.com/mattermost/mattermost/server/public/model/emoji.go
// @@ -0,0 +1,109 @@

// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
+ +package model + +import ( + "net/http" + "regexp" + "sort" +) + +const ( + EmojiNameMaxLength = 64 + EmojiSortByName = "name" +) + +var EmojiPattern = regexp.MustCompile(`:[a-zA-Z0-9_+-]+:`) + +type Emoji struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + CreatorId string `json:"creator_id"` + Name string `json:"name"` +} + +func (emoji *Emoji) Auditable() map[string]any { + return map[string]any{ + "id": emoji.Id, + "create_at": emoji.CreateAt, + "update_at": emoji.UpdateAt, + "delete_at": emoji.CreateAt, + "creator_id": emoji.CreatorId, + "name": emoji.Name, + } +} + +func IsSystemEmojiName(emojiName string) bool { + _, ok := SystemEmojis[emojiName] + return ok +} + +func GetSystemEmojiId(emojiName string) (string, bool) { + id, found := SystemEmojis[emojiName] + return id, found +} + +func makeReverseEmojiMap() map[string][]string { + reverseEmojiMap := make(map[string][]string) + for key, value := range SystemEmojis { + emojiNames := reverseEmojiMap[value] + emojiNames = append(emojiNames, key) + sort.Strings(emojiNames) + reverseEmojiMap[value] = emojiNames + } + + return reverseEmojiMap +} + +var reverseSystemEmojisMap = makeReverseEmojiMap() + +func GetEmojiNameFromUnicode(unicode string) (emojiName string, count int) { + if emojiNames, found := reverseSystemEmojisMap[unicode]; found { + return emojiNames[0], len(emojiNames) + } + + return "", 0 +} + +func (emoji *Emoji) IsValid() *AppError { + if !IsValidId(emoji.Id) { + return NewAppError("Emoji.IsValid", "model.emoji.id.app_error", nil, "", http.StatusBadRequest) + } + + if emoji.CreateAt == 0 { + return NewAppError("Emoji.IsValid", "model.emoji.create_at.app_error", nil, "id="+emoji.Id, http.StatusBadRequest) + } + + if emoji.UpdateAt == 0 { + return NewAppError("Emoji.IsValid", "model.emoji.update_at.app_error", nil, "id="+emoji.Id, http.StatusBadRequest) + } + + if len(emoji.CreatorId) > 26 { + return 
NewAppError("Emoji.IsValid", "model.emoji.user_id.app_error", nil, "", http.StatusBadRequest) + } + + return IsValidEmojiName(emoji.Name) +} + +func IsValidEmojiName(name string) *AppError { + if name == "" || len(name) > EmojiNameMaxLength || !IsValidAlphaNumHyphenUnderscorePlus(name) { + return NewAppError("Emoji.IsValid", "model.emoji.name.app_error", nil, "", http.StatusBadRequest) + } + if IsSystemEmojiName(name) { + return NewAppError("Emoji.IsValid", "model.emoji.system_emoji_name.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (emoji *Emoji) PreSave() { + if emoji.Id == "" { + emoji.Id = NewId() + } + + emoji.CreateAt = GetMillis() + emoji.UpdateAt = emoji.CreateAt +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/emoji_data.go b/vendor/github.com/mattermost/mattermost/server/public/model/emoji_data.go new file mode 100644 index 00000000..053ef326 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/emoji_data.go @@ -0,0 +1,4473 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// This file is automatically generated via `/webapp/channels/build/make_emojis.mjs`. Do not modify it manually. 
+ +package model + +var SystemEmojis = map[string]string{ + "grinning": "1f600", + "smiley": "1f603", + "smile": "1f604", + "grin": "1f601", + "laughing": "1f606", + "satisfied": "1f606", + "sweat_smile": "1f605", + "rolling_on_the_floor_laughing": "1f923", + "rofl": "1f923", + "joy": "1f602", + "slightly_smiling_face": "1f642", + "upside_down_face": "1f643", + "wink": "1f609", + "blush": "1f60a", + "innocent": "1f607", + "smiling_face_with_3_hearts": "1f970", + "heart_eyes": "1f60d", + "star-struck": "1f929", + "grinning_face_with_star_eyes": "1f929", + "kissing_heart": "1f618", + "kissing": "1f617", + "relaxed": "263a-fe0f", + "kissing_closed_eyes": "1f61a", + "kissing_smiling_eyes": "1f619", + "smiling_face_with_tear": "1f972", + "yum": "1f60b", + "stuck_out_tongue": "1f61b", + "stuck_out_tongue_winking_eye": "1f61c", + "zany_face": "1f92a", + "grinning_face_with_one_large_and_one_small_eye": "1f92a", + "stuck_out_tongue_closed_eyes": "1f61d", + "money_mouth_face": "1f911", + "hugging_face": "1f917", + "hugs": "1f917", + "face_with_hand_over_mouth": "1f92d", + "smiling_face_with_smiling_eyes_and_hand_covering_mouth": "1f92d", + "shushing_face": "1f92b", + "face_with_finger_covering_closed_lips": "1f92b", + "thinking_face": "1f914", + "thinking": "1f914", + "zipper_mouth_face": "1f910", + "face_with_raised_eyebrow": "1f928", + "face_with_one_eyebrow_raised": "1f928", + "neutral_face": "1f610", + "expressionless": "1f611", + "no_mouth": "1f636", + "smirk": "1f60f", + "unamused": "1f612", + "face_with_rolling_eyes": "1f644", + "roll_eyes": "1f644", + "grimacing": "1f62c", + "lying_face": "1f925", + "relieved": "1f60c", + "pensive": "1f614", + "sleepy": "1f62a", + "drooling_face": "1f924", + "sleeping": "1f634", + "mask": "1f637", + "face_with_thermometer": "1f912", + "face_with_head_bandage": "1f915", + "nauseated_face": "1f922", + "face_vomiting": "1f92e", + "face_with_open_mouth_vomiting": "1f92e", + "sneezing_face": "1f927", + "hot_face": "1f975", + "cold_face": 
"1f976", + "woozy_face": "1f974", + "dizzy_face": "1f635", + "exploding_head": "1f92f", + "shocked_face_with_exploding_head": "1f92f", + "face_with_cowboy_hat": "1f920", + "cowboy_hat_face": "1f920", + "partying_face": "1f973", + "disguised_face": "1f978", + "sunglasses": "1f60e", + "nerd_face": "1f913", + "face_with_monocle": "1f9d0", + "confused": "1f615", + "worried": "1f61f", + "slightly_frowning_face": "1f641", + "white_frowning_face": "2639-fe0f", + "frowning_face": "2639-fe0f", + "open_mouth": "1f62e", + "hushed": "1f62f", + "astonished": "1f632", + "flushed": "1f633", + "pleading_face": "1f97a", + "frowning": "1f626", + "anguished": "1f627", + "fearful": "1f628", + "cold_sweat": "1f630", + "disappointed_relieved": "1f625", + "cry": "1f622", + "sob": "1f62d", + "scream": "1f631", + "confounded": "1f616", + "persevere": "1f623", + "disappointed": "1f61e", + "sweat": "1f613", + "weary": "1f629", + "tired_face": "1f62b", + "yawning_face": "1f971", + "triumph": "1f624", + "rage": "1f621", + "pout": "1f621", + "angry": "1f620", + "face_with_symbols_on_mouth": "1f92c", + "serious_face_with_symbols_covering_mouth": "1f92c", + "smiling_imp": "1f608", + "imp": "1f47f", + "skull": "1f480", + "skull_and_crossbones": "2620-fe0f", + "hankey": "1f4a9", + "poop": "1f4a9", + "shit": "1f4a9", + "clown_face": "1f921", + "japanese_ogre": "1f479", + "japanese_goblin": "1f47a", + "ghost": "1f47b", + "alien": "1f47d", + "space_invader": "1f47e", + "robot_face": "1f916", + "robot": "1f916", + "smiley_cat": "1f63a", + "smile_cat": "1f638", + "joy_cat": "1f639", + "heart_eyes_cat": "1f63b", + "smirk_cat": "1f63c", + "kissing_cat": "1f63d", + "scream_cat": "1f640", + "crying_cat_face": "1f63f", + "pouting_cat": "1f63e", + "see_no_evil": "1f648", + "hear_no_evil": "1f649", + "speak_no_evil": "1f64a", + "kiss": "1f48b", + "love_letter": "1f48c", + "cupid": "1f498", + "gift_heart": "1f49d", + "sparkling_heart": "1f496", + "heartpulse": "1f497", + "heartbeat": "1f493", + 
"revolving_hearts": "1f49e", + "two_hearts": "1f495", + "heart_decoration": "1f49f", + "heavy_heart_exclamation_mark_ornament": "2763-fe0f", + "heavy_heart_exclamation": "2763-fe0f", + "broken_heart": "1f494", + "heart": "2764-fe0f", + "orange_heart": "1f9e1", + "yellow_heart": "1f49b", + "green_heart": "1f49a", + "blue_heart": "1f499", + "purple_heart": "1f49c", + "brown_heart": "1f90e", + "black_heart": "1f5a4", + "white_heart": "1f90d", + "100": "1f4af", + "anger": "1f4a2", + "boom": "1f4a5", + "collision": "1f4a5", + "dizzy": "1f4ab", + "sweat_drops": "1f4a6", + "dash": "1f4a8", + "hole": "1f573-fe0f", + "bomb": "1f4a3", + "speech_balloon": "1f4ac", + "eye-in-speech-bubble": "1f441-fe0f-200d-1f5e8-fe0f", + "left_speech_bubble": "1f5e8-fe0f", + "right_anger_bubble": "1f5ef-fe0f", + "thought_balloon": "1f4ad", + "zzz": "1f4a4", + "wave": "1f44b", + "raised_back_of_hand": "1f91a", + "raised_hand_with_fingers_splayed": "1f590-fe0f", + "hand": "270b", + "raised_hand": "270b", + "spock-hand": "1f596", + "vulcan_salute": "1f596", + "ok_hand": "1f44c", + "pinched_fingers": "1f90c", + "pinching_hand": "1f90f", + "v": "270c-fe0f", + "crossed_fingers": "1f91e", + "hand_with_index_and_middle_fingers_crossed": "1f91e", + "i_love_you_hand_sign": "1f91f", + "the_horns": "1f918", + "sign_of_the_horns": "1f918", + "metal": "1f918", + "call_me_hand": "1f919", + "point_left": "1f448", + "point_right": "1f449", + "point_up_2": "1f446", + "middle_finger": "1f595", + "reversed_hand_with_middle_finger_extended": "1f595", + "fu": "1f595", + "point_down": "1f447", + "point_up": "261d-fe0f", + "+1": "1f44d", + "thumbsup": "1f44d", + "-1": "1f44e", + "thumbsdown": "1f44e", + "fist": "270a", + "fist_raised": "270a", + "facepunch": "1f44a", + "punch": "1f44a", + "fist_oncoming": "1f44a", + "left-facing_fist": "1f91b", + "fist_left": "1f91b", + "right-facing_fist": "1f91c", + "fist_right": "1f91c", + "clap": "1f44f", + "raised_hands": "1f64c", + "open_hands": "1f450", + "palms_up_together": 
"1f932", + "handshake": "1f91d", + "pray": "1f64f", + "writing_hand": "270d-fe0f", + "nail_care": "1f485", + "selfie": "1f933", + "muscle": "1f4aa", + "mechanical_arm": "1f9be", + "mechanical_leg": "1f9bf", + "leg": "1f9b5", + "foot": "1f9b6", + "ear": "1f442", + "ear_with_hearing_aid": "1f9bb", + "nose": "1f443", + "brain": "1f9e0", + "anatomical_heart": "1fac0", + "lungs": "1fac1", + "tooth": "1f9b7", + "bone": "1f9b4", + "eyes": "1f440", + "eye": "1f441-fe0f", + "tongue": "1f445", + "lips": "1f444", + "baby": "1f476", + "child": "1f9d2", + "boy": "1f466", + "girl": "1f467", + "adult": "1f9d1", + "person_with_blond_hair": "1f471", + "man": "1f468", + "bearded_person": "1f9d4", + "red_haired_man": "1f468-200d-1f9b0", + "curly_haired_man": "1f468-200d-1f9b1", + "white_haired_man": "1f468-200d-1f9b3", + "bald_man": "1f468-200d-1f9b2", + "woman": "1f469", + "red_haired_woman": "1f469-200d-1f9b0", + "red_haired_person": "1f9d1-200d-1f9b0", + "curly_haired_woman": "1f469-200d-1f9b1", + "curly_haired_person": "1f9d1-200d-1f9b1", + "white_haired_woman": "1f469-200d-1f9b3", + "white_haired_person": "1f9d1-200d-1f9b3", + "bald_woman": "1f469-200d-1f9b2", + "bald_person": "1f9d1-200d-1f9b2", + "blond-haired-woman": "1f471-200d-2640-fe0f", + "blonde_woman": "1f471-200d-2640-fe0f", + "blond-haired-man": "1f471-200d-2642-fe0f", + "blonde_man": "1f471-200d-2642-fe0f", + "older_adult": "1f9d3", + "older_man": "1f474", + "older_woman": "1f475", + "person_frowning": "1f64d", + "man-frowning": "1f64d-200d-2642-fe0f", + "frowning_man": "1f64d-200d-2642-fe0f", + "woman-frowning": "1f64d-200d-2640-fe0f", + "frowning_woman": "1f64d-200d-2640-fe0f", + "person_with_pouting_face": "1f64e", + "man-pouting": "1f64e-200d-2642-fe0f", + "pouting_man": "1f64e-200d-2642-fe0f", + "woman-pouting": "1f64e-200d-2640-fe0f", + "pouting_woman": "1f64e-200d-2640-fe0f", + "no_good": "1f645", + "man-gesturing-no": "1f645-200d-2642-fe0f", + "ng_man": "1f645-200d-2642-fe0f", + "no_good_man": 
"1f645-200d-2642-fe0f", + "woman-gesturing-no": "1f645-200d-2640-fe0f", + "no_good_woman": "1f645-200d-2640-fe0f", + "ng_woman": "1f645-200d-2640-fe0f", + "ok_woman": "1f646", + "man-gesturing-ok": "1f646-200d-2642-fe0f", + "ok_man": "1f646-200d-2642-fe0f", + "woman-gesturing-ok": "1f646-200d-2640-fe0f", + "information_desk_person": "1f481", + "man-tipping-hand": "1f481-200d-2642-fe0f", + "tipping_hand_man": "1f481-200d-2642-fe0f", + "woman-tipping-hand": "1f481-200d-2640-fe0f", + "tipping_hand_woman": "1f481-200d-2640-fe0f", + "raising_hand": "1f64b", + "man-raising-hand": "1f64b-200d-2642-fe0f", + "raising_hand_man": "1f64b-200d-2642-fe0f", + "woman-raising-hand": "1f64b-200d-2640-fe0f", + "raising_hand_woman": "1f64b-200d-2640-fe0f", + "deaf_person": "1f9cf", + "deaf_man": "1f9cf-200d-2642-fe0f", + "deaf_woman": "1f9cf-200d-2640-fe0f", + "bow": "1f647", + "man-bowing": "1f647-200d-2642-fe0f", + "bowing_man": "1f647-200d-2642-fe0f", + "woman-bowing": "1f647-200d-2640-fe0f", + "bowing_woman": "1f647-200d-2640-fe0f", + "face_palm": "1f926", + "man-facepalming": "1f926-200d-2642-fe0f", + "man_facepalming": "1f926-200d-2642-fe0f", + "woman-facepalming": "1f926-200d-2640-fe0f", + "woman_facepalming": "1f926-200d-2640-fe0f", + "shrug": "1f937", + "man-shrugging": "1f937-200d-2642-fe0f", + "man_shrugging": "1f937-200d-2642-fe0f", + "woman-shrugging": "1f937-200d-2640-fe0f", + "woman_shrugging": "1f937-200d-2640-fe0f", + "health_worker": "1f9d1-200d-2695-fe0f", + "doctor": "1f9d1-200d-2695-fe0f", + "male-doctor": "1f468-200d-2695-fe0f", + "man_health_worker": "1f468-200d-2695-fe0f", + "female-doctor": "1f469-200d-2695-fe0f", + "woman_health_worker": "1f469-200d-2695-fe0f", + "student": "1f9d1-200d-1f393", + "male-student": "1f468-200d-1f393", + "man_student": "1f468-200d-1f393", + "female-student": "1f469-200d-1f393", + "woman_student": "1f469-200d-1f393", + "teacher": "1f9d1-200d-1f3eb", + "male-teacher": "1f468-200d-1f3eb", + "man_teacher": "1f468-200d-1f3eb", + 
"female-teacher": "1f469-200d-1f3eb", + "woman_teacher": "1f469-200d-1f3eb", + "judge": "1f9d1-200d-2696-fe0f", + "male-judge": "1f468-200d-2696-fe0f", + "man_judge": "1f468-200d-2696-fe0f", + "female-judge": "1f469-200d-2696-fe0f", + "woman_judge": "1f469-200d-2696-fe0f", + "farmer": "1f9d1-200d-1f33e", + "male-farmer": "1f468-200d-1f33e", + "man_farmer": "1f468-200d-1f33e", + "female-farmer": "1f469-200d-1f33e", + "woman_farmer": "1f469-200d-1f33e", + "cook": "1f9d1-200d-1f373", + "male-cook": "1f468-200d-1f373", + "man_cook": "1f468-200d-1f373", + "female-cook": "1f469-200d-1f373", + "woman_cook": "1f469-200d-1f373", + "mechanic": "1f9d1-200d-1f527", + "male-mechanic": "1f468-200d-1f527", + "man_mechanic": "1f468-200d-1f527", + "female-mechanic": "1f469-200d-1f527", + "woman_mechanic": "1f469-200d-1f527", + "factory_worker": "1f9d1-200d-1f3ed", + "male-factory-worker": "1f468-200d-1f3ed", + "man_factory_worker": "1f468-200d-1f3ed", + "female-factory-worker": "1f469-200d-1f3ed", + "woman_factory_worker": "1f469-200d-1f3ed", + "office_worker": "1f9d1-200d-1f4bc", + "male-office-worker": "1f468-200d-1f4bc", + "man_office_worker": "1f468-200d-1f4bc", + "female-office-worker": "1f469-200d-1f4bc", + "woman_office_worker": "1f469-200d-1f4bc", + "scientist": "1f9d1-200d-1f52c", + "male-scientist": "1f468-200d-1f52c", + "man_scientist": "1f468-200d-1f52c", + "female-scientist": "1f469-200d-1f52c", + "woman_scientist": "1f469-200d-1f52c", + "technologist": "1f9d1-200d-1f4bb", + "male-technologist": "1f468-200d-1f4bb", + "man_technologist": "1f468-200d-1f4bb", + "female-technologist": "1f469-200d-1f4bb", + "woman_technologist": "1f469-200d-1f4bb", + "singer": "1f9d1-200d-1f3a4", + "male-singer": "1f468-200d-1f3a4", + "man_singer": "1f468-200d-1f3a4", + "female-singer": "1f469-200d-1f3a4", + "woman_singer": "1f469-200d-1f3a4", + "artist": "1f9d1-200d-1f3a8", + "male-artist": "1f468-200d-1f3a8", + "man_artist": "1f468-200d-1f3a8", + "female-artist": "1f469-200d-1f3a8", + 
"woman_artist": "1f469-200d-1f3a8", + "pilot": "1f9d1-200d-2708-fe0f", + "male-pilot": "1f468-200d-2708-fe0f", + "man_pilot": "1f468-200d-2708-fe0f", + "female-pilot": "1f469-200d-2708-fe0f", + "woman_pilot": "1f469-200d-2708-fe0f", + "astronaut": "1f9d1-200d-1f680", + "male-astronaut": "1f468-200d-1f680", + "man_astronaut": "1f468-200d-1f680", + "female-astronaut": "1f469-200d-1f680", + "woman_astronaut": "1f469-200d-1f680", + "firefighter": "1f9d1-200d-1f692", + "male-firefighter": "1f468-200d-1f692", + "man_firefighter": "1f468-200d-1f692", + "female-firefighter": "1f469-200d-1f692", + "woman_firefighter": "1f469-200d-1f692", + "cop": "1f46e", + "male-police-officer": "1f46e-200d-2642-fe0f", + "policeman": "1f46e-200d-2642-fe0f", + "female-police-officer": "1f46e-200d-2640-fe0f", + "policewoman": "1f46e-200d-2640-fe0f", + "sleuth_or_spy": "1f575-fe0f", + "detective": "1f575-fe0f", + "male-detective": "1f575-fe0f-200d-2642-fe0f", + "male_detective": "1f575-fe0f-200d-2642-fe0f", + "female-detective": "1f575-fe0f-200d-2640-fe0f", + "female_detective": "1f575-fe0f-200d-2640-fe0f", + "guardsman": "1f482", + "male-guard": "1f482-200d-2642-fe0f", + "female-guard": "1f482-200d-2640-fe0f", + "guardswoman": "1f482-200d-2640-fe0f", + "ninja": "1f977", + "construction_worker": "1f477", + "male-construction-worker": "1f477-200d-2642-fe0f", + "construction_worker_man": "1f477-200d-2642-fe0f", + "female-construction-worker": "1f477-200d-2640-fe0f", + "construction_worker_woman": "1f477-200d-2640-fe0f", + "prince": "1f934", + "princess": "1f478", + "man_with_turban": "1f473", + "man-wearing-turban": "1f473-200d-2642-fe0f", + "woman-wearing-turban": "1f473-200d-2640-fe0f", + "woman_with_turban": "1f473-200d-2640-fe0f", + "man_with_gua_pi_mao": "1f472", + "person_with_headscarf": "1f9d5", + "person_in_tuxedo": "1f935", + "man_in_tuxedo": "1f935-200d-2642-fe0f", + "woman_in_tuxedo": "1f935-200d-2640-fe0f", + "bride_with_veil": "1f470", + "man_with_veil": "1f470-200d-2642-fe0f", + 
"woman_with_veil": "1f470-200d-2640-fe0f", + "pregnant_woman": "1f930", + "breast-feeding": "1f931", + "woman_feeding_baby": "1f469-200d-1f37c", + "man_feeding_baby": "1f468-200d-1f37c", + "person_feeding_baby": "1f9d1-200d-1f37c", + "angel": "1f47c", + "santa": "1f385", + "mrs_claus": "1f936", + "mother_christmas": "1f936", + "mx_claus": "1f9d1-200d-1f384", + "superhero": "1f9b8", + "male_superhero": "1f9b8-200d-2642-fe0f", + "female_superhero": "1f9b8-200d-2640-fe0f", + "supervillain": "1f9b9", + "male_supervillain": "1f9b9-200d-2642-fe0f", + "female_supervillain": "1f9b9-200d-2640-fe0f", + "mage": "1f9d9", + "male_mage": "1f9d9-200d-2642-fe0f", + "female_mage": "1f9d9-200d-2640-fe0f", + "fairy": "1f9da", + "male_fairy": "1f9da-200d-2642-fe0f", + "female_fairy": "1f9da-200d-2640-fe0f", + "vampire": "1f9db", + "male_vampire": "1f9db-200d-2642-fe0f", + "female_vampire": "1f9db-200d-2640-fe0f", + "merperson": "1f9dc", + "merman": "1f9dc-200d-2642-fe0f", + "mermaid": "1f9dc-200d-2640-fe0f", + "elf": "1f9dd", + "male_elf": "1f9dd-200d-2642-fe0f", + "female_elf": "1f9dd-200d-2640-fe0f", + "genie": "1f9de", + "male_genie": "1f9de-200d-2642-fe0f", + "female_genie": "1f9de-200d-2640-fe0f", + "zombie": "1f9df", + "male_zombie": "1f9df-200d-2642-fe0f", + "female_zombie": "1f9df-200d-2640-fe0f", + "massage": "1f486", + "man-getting-massage": "1f486-200d-2642-fe0f", + "massage_man": "1f486-200d-2642-fe0f", + "woman-getting-massage": "1f486-200d-2640-fe0f", + "massage_woman": "1f486-200d-2640-fe0f", + "haircut": "1f487", + "man-getting-haircut": "1f487-200d-2642-fe0f", + "haircut_man": "1f487-200d-2642-fe0f", + "woman-getting-haircut": "1f487-200d-2640-fe0f", + "haircut_woman": "1f487-200d-2640-fe0f", + "walking": "1f6b6", + "man-walking": "1f6b6-200d-2642-fe0f", + "walking_man": "1f6b6-200d-2642-fe0f", + "woman-walking": "1f6b6-200d-2640-fe0f", + "walking_woman": "1f6b6-200d-2640-fe0f", + "standing_person": "1f9cd", + "man_standing": "1f9cd-200d-2642-fe0f", + 
"woman_standing": "1f9cd-200d-2640-fe0f", + "kneeling_person": "1f9ce", + "man_kneeling": "1f9ce-200d-2642-fe0f", + "woman_kneeling": "1f9ce-200d-2640-fe0f", + "person_with_probing_cane": "1f9d1-200d-1f9af", + "man_with_probing_cane": "1f468-200d-1f9af", + "woman_with_probing_cane": "1f469-200d-1f9af", + "person_in_motorized_wheelchair": "1f9d1-200d-1f9bc", + "man_in_motorized_wheelchair": "1f468-200d-1f9bc", + "woman_in_motorized_wheelchair": "1f469-200d-1f9bc", + "person_in_manual_wheelchair": "1f9d1-200d-1f9bd", + "man_in_manual_wheelchair": "1f468-200d-1f9bd", + "woman_in_manual_wheelchair": "1f469-200d-1f9bd", + "runner": "1f3c3", + "running": "1f3c3", + "man-running": "1f3c3-200d-2642-fe0f", + "running_man": "1f3c3-200d-2642-fe0f", + "woman-running": "1f3c3-200d-2640-fe0f", + "running_woman": "1f3c3-200d-2640-fe0f", + "dancer": "1f483", + "man_dancing": "1f57a", + "man_in_business_suit_levitating": "1f574-fe0f", + "business_suit_levitating": "1f574-fe0f", + "dancers": "1f46f", + "man-with-bunny-ears-partying": "1f46f-200d-2642-fe0f", + "dancing_men": "1f46f-200d-2642-fe0f", + "woman-with-bunny-ears-partying": "1f46f-200d-2640-fe0f", + "dancing_women": "1f46f-200d-2640-fe0f", + "person_in_steamy_room": "1f9d6", + "man_in_steamy_room": "1f9d6-200d-2642-fe0f", + "woman_in_steamy_room": "1f9d6-200d-2640-fe0f", + "person_climbing": "1f9d7", + "man_climbing": "1f9d7-200d-2642-fe0f", + "woman_climbing": "1f9d7-200d-2640-fe0f", + "fencer": "1f93a", + "person_fencing": "1f93a", + "horse_racing": "1f3c7", + "skier": "26f7-fe0f", + "snowboarder": "1f3c2", + "golfer": "1f3cc-fe0f", + "man-golfing": "1f3cc-fe0f-200d-2642-fe0f", + "golfing_man": "1f3cc-fe0f-200d-2642-fe0f", + "woman-golfing": "1f3cc-fe0f-200d-2640-fe0f", + "golfing_woman": "1f3cc-fe0f-200d-2640-fe0f", + "surfer": "1f3c4", + "man-surfing": "1f3c4-200d-2642-fe0f", + "surfing_man": "1f3c4-200d-2642-fe0f", + "woman-surfing": "1f3c4-200d-2640-fe0f", + "surfing_woman": "1f3c4-200d-2640-fe0f", + "rowboat": 
"1f6a3", + "man-rowing-boat": "1f6a3-200d-2642-fe0f", + "rowing_man": "1f6a3-200d-2642-fe0f", + "woman-rowing-boat": "1f6a3-200d-2640-fe0f", + "rowing_woman": "1f6a3-200d-2640-fe0f", + "swimmer": "1f3ca", + "man-swimming": "1f3ca-200d-2642-fe0f", + "swimming_man": "1f3ca-200d-2642-fe0f", + "woman-swimming": "1f3ca-200d-2640-fe0f", + "swimming_woman": "1f3ca-200d-2640-fe0f", + "person_with_ball": "26f9-fe0f", + "man-bouncing-ball": "26f9-fe0f-200d-2642-fe0f", + "basketball_man": "26f9-fe0f-200d-2642-fe0f", + "woman-bouncing-ball": "26f9-fe0f-200d-2640-fe0f", + "basketball_woman": "26f9-fe0f-200d-2640-fe0f", + "weight_lifter": "1f3cb-fe0f", + "man-lifting-weights": "1f3cb-fe0f-200d-2642-fe0f", + "weight_lifting_man": "1f3cb-fe0f-200d-2642-fe0f", + "woman-lifting-weights": "1f3cb-fe0f-200d-2640-fe0f", + "weight_lifting_woman": "1f3cb-fe0f-200d-2640-fe0f", + "bicyclist": "1f6b4", + "man-biking": "1f6b4-200d-2642-fe0f", + "biking_man": "1f6b4-200d-2642-fe0f", + "woman-biking": "1f6b4-200d-2640-fe0f", + "biking_woman": "1f6b4-200d-2640-fe0f", + "mountain_bicyclist": "1f6b5", + "man-mountain-biking": "1f6b5-200d-2642-fe0f", + "mountain_biking_man": "1f6b5-200d-2642-fe0f", + "woman-mountain-biking": "1f6b5-200d-2640-fe0f", + "mountain_biking_woman": "1f6b5-200d-2640-fe0f", + "person_doing_cartwheel": "1f938", + "man-cartwheeling": "1f938-200d-2642-fe0f", + "man_cartwheeling": "1f938-200d-2642-fe0f", + "woman-cartwheeling": "1f938-200d-2640-fe0f", + "woman_cartwheeling": "1f938-200d-2640-fe0f", + "wrestlers": "1f93c", + "man-wrestling": "1f93c-200d-2642-fe0f", + "men_wrestling": "1f93c-200d-2642-fe0f", + "woman-wrestling": "1f93c-200d-2640-fe0f", + "women_wrestling": "1f93c-200d-2640-fe0f", + "water_polo": "1f93d", + "man-playing-water-polo": "1f93d-200d-2642-fe0f", + "man_playing_water_polo": "1f93d-200d-2642-fe0f", + "woman-playing-water-polo": "1f93d-200d-2640-fe0f", + "woman_playing_water_polo": "1f93d-200d-2640-fe0f", + "handball": "1f93e", + "man-playing-handball": 
"1f93e-200d-2642-fe0f", + "man_playing_handball": "1f93e-200d-2642-fe0f", + "woman-playing-handball": "1f93e-200d-2640-fe0f", + "woman_playing_handball": "1f93e-200d-2640-fe0f", + "juggling": "1f939", + "man-juggling": "1f939-200d-2642-fe0f", + "man_juggling": "1f939-200d-2642-fe0f", + "woman-juggling": "1f939-200d-2640-fe0f", + "woman_juggling": "1f939-200d-2640-fe0f", + "person_in_lotus_position": "1f9d8", + "man_in_lotus_position": "1f9d8-200d-2642-fe0f", + "woman_in_lotus_position": "1f9d8-200d-2640-fe0f", + "bath": "1f6c0", + "sleeping_accommodation": "1f6cc", + "sleeping_bed": "1f6cc", + "people_holding_hands": "1f9d1-200d-1f91d-200d-1f9d1", + "two_women_holding_hands": "1f46d", + "women_holding_hands": "1f46d", + "man_and_woman_holding_hands": "1f46b", + "woman_and_man_holding_hands": "1f46b", + "couple": "1f46b", + "two_men_holding_hands": "1f46c", + "men_holding_hands": "1f46c", + "couplekiss": "1f48f", + "woman-kiss-man": "1f469-200d-2764-fe0f-200d-1f48b-200d-1f468", + "couplekiss_man_woman": "1f469-200d-2764-fe0f-200d-1f48b-200d-1f468", + "man-kiss-man": "1f468-200d-2764-fe0f-200d-1f48b-200d-1f468", + "couplekiss_man_man": "1f468-200d-2764-fe0f-200d-1f48b-200d-1f468", + "woman-kiss-woman": "1f469-200d-2764-fe0f-200d-1f48b-200d-1f469", + "couplekiss_woman_woman": "1f469-200d-2764-fe0f-200d-1f48b-200d-1f469", + "couple_with_heart": "1f491", + "woman-heart-man": "1f469-200d-2764-fe0f-200d-1f468", + "couple_with_heart_woman_man": "1f469-200d-2764-fe0f-200d-1f468", + "man-heart-man": "1f468-200d-2764-fe0f-200d-1f468", + "couple_with_heart_man_man": "1f468-200d-2764-fe0f-200d-1f468", + "woman-heart-woman": "1f469-200d-2764-fe0f-200d-1f469", + "couple_with_heart_woman_woman": "1f469-200d-2764-fe0f-200d-1f469", + "family": "1f46a", + "man-woman-boy": "1f468-200d-1f469-200d-1f466", + "family_man_woman_boy": "1f468-200d-1f469-200d-1f466", + "man-woman-girl": "1f468-200d-1f469-200d-1f467", + "family_man_woman_girl": "1f468-200d-1f469-200d-1f467", + 
"man-woman-girl-boy": "1f468-200d-1f469-200d-1f467-200d-1f466", + "family_man_woman_girl_boy": "1f468-200d-1f469-200d-1f467-200d-1f466", + "man-woman-boy-boy": "1f468-200d-1f469-200d-1f466-200d-1f466", + "family_man_woman_boy_boy": "1f468-200d-1f469-200d-1f466-200d-1f466", + "man-woman-girl-girl": "1f468-200d-1f469-200d-1f467-200d-1f467", + "family_man_woman_girl_girl": "1f468-200d-1f469-200d-1f467-200d-1f467", + "man-man-boy": "1f468-200d-1f468-200d-1f466", + "family_man_man_boy": "1f468-200d-1f468-200d-1f466", + "man-man-girl": "1f468-200d-1f468-200d-1f467", + "family_man_man_girl": "1f468-200d-1f468-200d-1f467", + "man-man-girl-boy": "1f468-200d-1f468-200d-1f467-200d-1f466", + "family_man_man_girl_boy": "1f468-200d-1f468-200d-1f467-200d-1f466", + "man-man-boy-boy": "1f468-200d-1f468-200d-1f466-200d-1f466", + "family_man_man_boy_boy": "1f468-200d-1f468-200d-1f466-200d-1f466", + "man-man-girl-girl": "1f468-200d-1f468-200d-1f467-200d-1f467", + "family_man_man_girl_girl": "1f468-200d-1f468-200d-1f467-200d-1f467", + "woman-woman-boy": "1f469-200d-1f469-200d-1f466", + "family_woman_woman_boy": "1f469-200d-1f469-200d-1f466", + "woman-woman-girl": "1f469-200d-1f469-200d-1f467", + "family_woman_woman_girl": "1f469-200d-1f469-200d-1f467", + "woman-woman-girl-boy": "1f469-200d-1f469-200d-1f467-200d-1f466", + "family_woman_woman_girl_boy": "1f469-200d-1f469-200d-1f467-200d-1f466", + "woman-woman-boy-boy": "1f469-200d-1f469-200d-1f466-200d-1f466", + "family_woman_woman_boy_boy": "1f469-200d-1f469-200d-1f466-200d-1f466", + "woman-woman-girl-girl": "1f469-200d-1f469-200d-1f467-200d-1f467", + "family_woman_woman_girl_girl": "1f469-200d-1f469-200d-1f467-200d-1f467", + "man-boy": "1f468-200d-1f466", + "family_man_boy": "1f468-200d-1f466", + "man-boy-boy": "1f468-200d-1f466-200d-1f466", + "family_man_boy_boy": "1f468-200d-1f466-200d-1f466", + "man-girl": "1f468-200d-1f467", + "family_man_girl": "1f468-200d-1f467", + "man-girl-boy": "1f468-200d-1f467-200d-1f466", + 
"family_man_girl_boy": "1f468-200d-1f467-200d-1f466", + "man-girl-girl": "1f468-200d-1f467-200d-1f467", + "family_man_girl_girl": "1f468-200d-1f467-200d-1f467", + "woman-boy": "1f469-200d-1f466", + "family_woman_boy": "1f469-200d-1f466", + "woman-boy-boy": "1f469-200d-1f466-200d-1f466", + "family_woman_boy_boy": "1f469-200d-1f466-200d-1f466", + "woman-girl": "1f469-200d-1f467", + "family_woman_girl": "1f469-200d-1f467", + "woman-girl-boy": "1f469-200d-1f467-200d-1f466", + "family_woman_girl_boy": "1f469-200d-1f467-200d-1f466", + "woman-girl-girl": "1f469-200d-1f467-200d-1f467", + "family_woman_girl_girl": "1f469-200d-1f467-200d-1f467", + "speaking_head_in_silhouette": "1f5e3-fe0f", + "speaking_head": "1f5e3-fe0f", + "bust_in_silhouette": "1f464", + "busts_in_silhouette": "1f465", + "people_hugging": "1fac2", + "footprints": "1f463", + "skin-tone-2": "1f3fb", + "skin-tone-3": "1f3fc", + "skin-tone-4": "1f3fd", + "skin-tone-5": "1f3fe", + "skin-tone-6": "1f3ff", + "monkey_face": "1f435", + "monkey": "1f412", + "gorilla": "1f98d", + "orangutan": "1f9a7", + "dog": "1f436", + "dog2": "1f415", + "guide_dog": "1f9ae", + "service_dog": "1f415-200d-1f9ba", + "poodle": "1f429", + "wolf": "1f43a", + "fox_face": "1f98a", + "raccoon": "1f99d", + "cat": "1f431", + "cat2": "1f408", + "black_cat": "1f408-200d-2b1b", + "lion_face": "1f981", + "lion": "1f981", + "tiger": "1f42f", + "tiger2": "1f405", + "leopard": "1f406", + "horse": "1f434", + "racehorse": "1f40e", + "unicorn_face": "1f984", + "unicorn": "1f984", + "zebra_face": "1f993", + "deer": "1f98c", + "bison": "1f9ac", + "cow": "1f42e", + "ox": "1f402", + "water_buffalo": "1f403", + "cow2": "1f404", + "pig": "1f437", + "pig2": "1f416", + "boar": "1f417", + "pig_nose": "1f43d", + "ram": "1f40f", + "sheep": "1f411", + "goat": "1f410", + "dromedary_camel": "1f42a", + "camel": "1f42b", + "llama": "1f999", + "giraffe_face": "1f992", + "elephant": "1f418", + "mammoth": "1f9a3", + "rhinoceros": "1f98f", + "hippopotamus": "1f99b", + 
"mouse": "1f42d", + "mouse2": "1f401", + "rat": "1f400", + "hamster": "1f439", + "rabbit": "1f430", + "rabbit2": "1f407", + "chipmunk": "1f43f-fe0f", + "beaver": "1f9ab", + "hedgehog": "1f994", + "bat": "1f987", + "bear": "1f43b", + "polar_bear": "1f43b-200d-2744-fe0f", + "koala": "1f428", + "panda_face": "1f43c", + "sloth": "1f9a5", + "otter": "1f9a6", + "skunk": "1f9a8", + "kangaroo": "1f998", + "badger": "1f9a1", + "feet": "1f43e", + "paw_prints": "1f43e", + "turkey": "1f983", + "chicken": "1f414", + "rooster": "1f413", + "hatching_chick": "1f423", + "baby_chick": "1f424", + "hatched_chick": "1f425", + "bird": "1f426", + "penguin": "1f427", + "dove_of_peace": "1f54a-fe0f", + "dove": "1f54a-fe0f", + "eagle": "1f985", + "duck": "1f986", + "swan": "1f9a2", + "owl": "1f989", + "dodo": "1f9a4", + "feather": "1fab6", + "flamingo": "1f9a9", + "peacock": "1f99a", + "parrot": "1f99c", + "frog": "1f438", + "crocodile": "1f40a", + "turtle": "1f422", + "lizard": "1f98e", + "snake": "1f40d", + "dragon_face": "1f432", + "dragon": "1f409", + "sauropod": "1f995", + "t-rex": "1f996", + "whale": "1f433", + "whale2": "1f40b", + "dolphin": "1f42c", + "flipper": "1f42c", + "seal": "1f9ad", + "fish": "1f41f", + "tropical_fish": "1f420", + "blowfish": "1f421", + "shark": "1f988", + "octopus": "1f419", + "shell": "1f41a", + "snail": "1f40c", + "butterfly": "1f98b", + "bug": "1f41b", + "ant": "1f41c", + "bee": "1f41d", + "honeybee": "1f41d", + "beetle": "1fab2", + "ladybug": "1f41e", + "lady_beetle": "1f41e", + "cricket": "1f997", + "cockroach": "1fab3", + "spider": "1f577-fe0f", + "spider_web": "1f578-fe0f", + "scorpion": "1f982", + "mosquito": "1f99f", + "fly": "1fab0", + "worm": "1fab1", + "microbe": "1f9a0", + "bouquet": "1f490", + "cherry_blossom": "1f338", + "white_flower": "1f4ae", + "rosette": "1f3f5-fe0f", + "rose": "1f339", + "wilted_flower": "1f940", + "hibiscus": "1f33a", + "sunflower": "1f33b", + "blossom": "1f33c", + "tulip": "1f337", + "seedling": "1f331", + 
"potted_plant": "1fab4", + "evergreen_tree": "1f332", + "deciduous_tree": "1f333", + "palm_tree": "1f334", + "cactus": "1f335", + "ear_of_rice": "1f33e", + "herb": "1f33f", + "shamrock": "2618-fe0f", + "four_leaf_clover": "1f340", + "maple_leaf": "1f341", + "fallen_leaf": "1f342", + "leaves": "1f343", + "grapes": "1f347", + "melon": "1f348", + "watermelon": "1f349", + "tangerine": "1f34a", + "mandarin": "1f34a", + "orange": "1f34a", + "lemon": "1f34b", + "banana": "1f34c", + "pineapple": "1f34d", + "mango": "1f96d", + "apple": "1f34e", + "green_apple": "1f34f", + "pear": "1f350", + "peach": "1f351", + "cherries": "1f352", + "strawberry": "1f353", + "blueberries": "1fad0", + "kiwifruit": "1f95d", + "kiwi_fruit": "1f95d", + "tomato": "1f345", + "olive": "1fad2", + "coconut": "1f965", + "avocado": "1f951", + "eggplant": "1f346", + "potato": "1f954", + "carrot": "1f955", + "corn": "1f33d", + "hot_pepper": "1f336-fe0f", + "bell_pepper": "1fad1", + "cucumber": "1f952", + "leafy_green": "1f96c", + "broccoli": "1f966", + "garlic": "1f9c4", + "onion": "1f9c5", + "mushroom": "1f344", + "peanuts": "1f95c", + "chestnut": "1f330", + "bread": "1f35e", + "croissant": "1f950", + "baguette_bread": "1f956", + "flatbread": "1fad3", + "pretzel": "1f968", + "bagel": "1f96f", + "pancakes": "1f95e", + "waffle": "1f9c7", + "cheese_wedge": "1f9c0", + "cheese": "1f9c0", + "meat_on_bone": "1f356", + "poultry_leg": "1f357", + "cut_of_meat": "1f969", + "bacon": "1f953", + "hamburger": "1f354", + "fries": "1f35f", + "pizza": "1f355", + "hotdog": "1f32d", + "sandwich": "1f96a", + "taco": "1f32e", + "burrito": "1f32f", + "tamale": "1fad4", + "stuffed_flatbread": "1f959", + "falafel": "1f9c6", + "egg": "1f95a", + "fried_egg": "1f373", + "cooking": "1f373", + "shallow_pan_of_food": "1f958", + "stew": "1f372", + "fondue": "1fad5", + "bowl_with_spoon": "1f963", + "green_salad": "1f957", + "popcorn": "1f37f", + "butter": "1f9c8", + "salt": "1f9c2", + "canned_food": "1f96b", + "bento": "1f371", + 
"rice_cracker": "1f358", + "rice_ball": "1f359", + "rice": "1f35a", + "curry": "1f35b", + "ramen": "1f35c", + "spaghetti": "1f35d", + "sweet_potato": "1f360", + "oden": "1f362", + "sushi": "1f363", + "fried_shrimp": "1f364", + "fish_cake": "1f365", + "moon_cake": "1f96e", + "dango": "1f361", + "dumpling": "1f95f", + "fortune_cookie": "1f960", + "takeout_box": "1f961", + "crab": "1f980", + "lobster": "1f99e", + "shrimp": "1f990", + "squid": "1f991", + "oyster": "1f9aa", + "icecream": "1f366", + "shaved_ice": "1f367", + "ice_cream": "1f368", + "doughnut": "1f369", + "cookie": "1f36a", + "birthday": "1f382", + "cake": "1f370", + "cupcake": "1f9c1", + "pie": "1f967", + "chocolate_bar": "1f36b", + "candy": "1f36c", + "lollipop": "1f36d", + "custard": "1f36e", + "honey_pot": "1f36f", + "baby_bottle": "1f37c", + "glass_of_milk": "1f95b", + "milk_glass": "1f95b", + "coffee": "2615", + "teapot": "1fad6", + "tea": "1f375", + "sake": "1f376", + "champagne": "1f37e", + "wine_glass": "1f377", + "cocktail": "1f378", + "tropical_drink": "1f379", + "beer": "1f37a", + "beers": "1f37b", + "clinking_glasses": "1f942", + "tumbler_glass": "1f943", + "cup_with_straw": "1f964", + "bubble_tea": "1f9cb", + "beverage_box": "1f9c3", + "mate_drink": "1f9c9", + "ice_cube": "1f9ca", + "chopsticks": "1f962", + "knife_fork_plate": "1f37d-fe0f", + "plate_with_cutlery": "1f37d-fe0f", + "fork_and_knife": "1f374", + "spoon": "1f944", + "hocho": "1f52a", + "knife": "1f52a", + "amphora": "1f3fa", + "earth_africa": "1f30d", + "earth_americas": "1f30e", + "earth_asia": "1f30f", + "globe_with_meridians": "1f310", + "world_map": "1f5fa-fe0f", + "japan": "1f5fe", + "compass": "1f9ed", + "snow_capped_mountain": "1f3d4-fe0f", + "mountain_snow": "1f3d4-fe0f", + "mountain": "26f0-fe0f", + "volcano": "1f30b", + "mount_fuji": "1f5fb", + "camping": "1f3d5-fe0f", + "beach_with_umbrella": "1f3d6-fe0f", + "beach_umbrella": "1f3d6-fe0f", + "desert": "1f3dc-fe0f", + "desert_island": "1f3dd-fe0f", + "national_park": 
"1f3de-fe0f", + "stadium": "1f3df-fe0f", + "classical_building": "1f3db-fe0f", + "building_construction": "1f3d7-fe0f", + "bricks": "1f9f1", + "rock": "1faa8", + "wood": "1fab5", + "hut": "1f6d6", + "house_buildings": "1f3d8-fe0f", + "houses": "1f3d8-fe0f", + "derelict_house_building": "1f3da-fe0f", + "derelict_house": "1f3da-fe0f", + "house": "1f3e0", + "house_with_garden": "1f3e1", + "office": "1f3e2", + "post_office": "1f3e3", + "european_post_office": "1f3e4", + "hospital": "1f3e5", + "bank": "1f3e6", + "hotel": "1f3e8", + "love_hotel": "1f3e9", + "convenience_store": "1f3ea", + "school": "1f3eb", + "department_store": "1f3ec", + "factory": "1f3ed", + "japanese_castle": "1f3ef", + "european_castle": "1f3f0", + "wedding": "1f492", + "tokyo_tower": "1f5fc", + "statue_of_liberty": "1f5fd", + "church": "26ea", + "mosque": "1f54c", + "hindu_temple": "1f6d5", + "synagogue": "1f54d", + "shinto_shrine": "26e9-fe0f", + "kaaba": "1f54b", + "fountain": "26f2", + "tent": "26fa", + "foggy": "1f301", + "night_with_stars": "1f303", + "cityscape": "1f3d9-fe0f", + "sunrise_over_mountains": "1f304", + "sunrise": "1f305", + "city_sunset": "1f306", + "city_sunrise": "1f307", + "bridge_at_night": "1f309", + "hotsprings": "2668-fe0f", + "carousel_horse": "1f3a0", + "ferris_wheel": "1f3a1", + "roller_coaster": "1f3a2", + "barber": "1f488", + "circus_tent": "1f3aa", + "steam_locomotive": "1f682", + "railway_car": "1f683", + "bullettrain_side": "1f684", + "bullettrain_front": "1f685", + "train2": "1f686", + "metro": "1f687", + "light_rail": "1f688", + "station": "1f689", + "tram": "1f68a", + "monorail": "1f69d", + "mountain_railway": "1f69e", + "train": "1f68b", + "bus": "1f68c", + "oncoming_bus": "1f68d", + "trolleybus": "1f68e", + "minibus": "1f690", + "ambulance": "1f691", + "fire_engine": "1f692", + "police_car": "1f693", + "oncoming_police_car": "1f694", + "taxi": "1f695", + "oncoming_taxi": "1f696", + "car": "1f697", + "red_car": "1f697", + "oncoming_automobile": "1f698", + 
"blue_car": "1f699", + "pickup_truck": "1f6fb", + "truck": "1f69a", + "articulated_lorry": "1f69b", + "tractor": "1f69c", + "racing_car": "1f3ce-fe0f", + "racing_motorcycle": "1f3cd-fe0f", + "motorcycle": "1f3cd-fe0f", + "motor_scooter": "1f6f5", + "manual_wheelchair": "1f9bd", + "motorized_wheelchair": "1f9bc", + "auto_rickshaw": "1f6fa", + "bike": "1f6b2", + "scooter": "1f6f4", + "kick_scooter": "1f6f4", + "skateboard": "1f6f9", + "roller_skate": "1f6fc", + "busstop": "1f68f", + "motorway": "1f6e3-fe0f", + "railway_track": "1f6e4-fe0f", + "oil_drum": "1f6e2-fe0f", + "fuelpump": "26fd", + "rotating_light": "1f6a8", + "traffic_light": "1f6a5", + "vertical_traffic_light": "1f6a6", + "octagonal_sign": "1f6d1", + "stop_sign": "1f6d1", + "construction": "1f6a7", + "anchor": "2693", + "boat": "26f5", + "sailboat": "26f5", + "canoe": "1f6f6", + "speedboat": "1f6a4", + "passenger_ship": "1f6f3-fe0f", + "ferry": "26f4-fe0f", + "motor_boat": "1f6e5-fe0f", + "ship": "1f6a2", + "airplane": "2708-fe0f", + "small_airplane": "1f6e9-fe0f", + "airplane_departure": "1f6eb", + "flight_departure": "1f6eb", + "airplane_arriving": "1f6ec", + "flight_arrival": "1f6ec", + "parachute": "1fa82", + "seat": "1f4ba", + "helicopter": "1f681", + "suspension_railway": "1f69f", + "mountain_cableway": "1f6a0", + "aerial_tramway": "1f6a1", + "satellite": "1f6f0-fe0f", + "artificial_satellite": "1f6f0-fe0f", + "rocket": "1f680", + "flying_saucer": "1f6f8", + "bellhop_bell": "1f6ce-fe0f", + "luggage": "1f9f3", + "hourglass": "231b", + "hourglass_flowing_sand": "23f3", + "watch": "231a", + "alarm_clock": "23f0", + "stopwatch": "23f1-fe0f", + "timer_clock": "23f2-fe0f", + "mantelpiece_clock": "1f570-fe0f", + "clock12": "1f55b", + "clock1230": "1f567", + "clock1": "1f550", + "clock130": "1f55c", + "clock2": "1f551", + "clock230": "1f55d", + "clock3": "1f552", + "clock330": "1f55e", + "clock4": "1f553", + "clock430": "1f55f", + "clock5": "1f554", + "clock530": "1f560", + "clock6": "1f555", + "clock630": 
"1f561", + "clock7": "1f556", + "clock730": "1f562", + "clock8": "1f557", + "clock830": "1f563", + "clock9": "1f558", + "clock930": "1f564", + "clock10": "1f559", + "clock1030": "1f565", + "clock11": "1f55a", + "clock1130": "1f566", + "new_moon": "1f311", + "waxing_crescent_moon": "1f312", + "first_quarter_moon": "1f313", + "moon": "1f314", + "waxing_gibbous_moon": "1f314", + "full_moon": "1f315", + "waning_gibbous_moon": "1f316", + "last_quarter_moon": "1f317", + "waning_crescent_moon": "1f318", + "crescent_moon": "1f319", + "new_moon_with_face": "1f31a", + "first_quarter_moon_with_face": "1f31b", + "last_quarter_moon_with_face": "1f31c", + "thermometer": "1f321-fe0f", + "sunny": "2600-fe0f", + "full_moon_with_face": "1f31d", + "sun_with_face": "1f31e", + "ringed_planet": "1fa90", + "star": "2b50", + "star2": "1f31f", + "stars": "1f320", + "milky_way": "1f30c", + "cloud": "2601-fe0f", + "partly_sunny": "26c5", + "thunder_cloud_and_rain": "26c8-fe0f", + "cloud_with_lightning_and_rain": "26c8-fe0f", + "mostly_sunny": "1f324-fe0f", + "sun_small_cloud": "1f324-fe0f", + "sun_behind_small_cloud": "1f324-fe0f", + "barely_sunny": "1f325-fe0f", + "sun_behind_cloud": "1f325-fe0f", + "sun_behind_large_cloud": "1f325-fe0f", + "partly_sunny_rain": "1f326-fe0f", + "sun_behind_rain_cloud": "1f326-fe0f", + "rain_cloud": "1f327-fe0f", + "cloud_with_rain": "1f327-fe0f", + "snow_cloud": "1f328-fe0f", + "cloud_with_snow": "1f328-fe0f", + "lightning": "1f329-fe0f", + "lightning_cloud": "1f329-fe0f", + "cloud_with_lightning": "1f329-fe0f", + "tornado": "1f32a-fe0f", + "tornado_cloud": "1f32a-fe0f", + "fog": "1f32b-fe0f", + "wind_blowing_face": "1f32c-fe0f", + "wind_face": "1f32c-fe0f", + "cyclone": "1f300", + "rainbow": "1f308", + "closed_umbrella": "1f302", + "umbrella": "2602-fe0f", + "open_umbrella": "2602-fe0f", + "umbrella_with_rain_drops": "2614", + "umbrella_on_ground": "26f1-fe0f", + "parasol_on_ground": "26f1-fe0f", + "zap": "26a1", + "snowflake": "2744-fe0f", + "snowman": 
"2603-fe0f", + "snowman_with_snow": "2603-fe0f", + "snowman_without_snow": "26c4", + "comet": "2604-fe0f", + "fire": "1f525", + "droplet": "1f4a7", + "ocean": "1f30a", + "jack_o_lantern": "1f383", + "christmas_tree": "1f384", + "fireworks": "1f386", + "sparkler": "1f387", + "firecracker": "1f9e8", + "sparkles": "2728", + "balloon": "1f388", + "tada": "1f389", + "confetti_ball": "1f38a", + "tanabata_tree": "1f38b", + "bamboo": "1f38d", + "dolls": "1f38e", + "flags": "1f38f", + "wind_chime": "1f390", + "rice_scene": "1f391", + "red_envelope": "1f9e7", + "ribbon": "1f380", + "gift": "1f381", + "reminder_ribbon": "1f397-fe0f", + "admission_tickets": "1f39f-fe0f", + "tickets": "1f39f-fe0f", + "ticket": "1f3ab", + "medal": "1f396-fe0f", + "medal_military": "1f396-fe0f", + "trophy": "1f3c6", + "sports_medal": "1f3c5", + "medal_sports": "1f3c5", + "first_place_medal": "1f947", + "1st_place_medal": "1f947", + "second_place_medal": "1f948", + "2nd_place_medal": "1f948", + "third_place_medal": "1f949", + "3rd_place_medal": "1f949", + "soccer": "26bd", + "baseball": "26be", + "softball": "1f94e", + "basketball": "1f3c0", + "volleyball": "1f3d0", + "football": "1f3c8", + "rugby_football": "1f3c9", + "tennis": "1f3be", + "flying_disc": "1f94f", + "bowling": "1f3b3", + "cricket_bat_and_ball": "1f3cf", + "field_hockey_stick_and_ball": "1f3d1", + "field_hockey": "1f3d1", + "ice_hockey_stick_and_puck": "1f3d2", + "ice_hockey": "1f3d2", + "lacrosse": "1f94d", + "table_tennis_paddle_and_ball": "1f3d3", + "ping_pong": "1f3d3", + "badminton_racquet_and_shuttlecock": "1f3f8", + "badminton": "1f3f8", + "boxing_glove": "1f94a", + "martial_arts_uniform": "1f94b", + "goal_net": "1f945", + "golf": "26f3", + "ice_skate": "26f8-fe0f", + "fishing_pole_and_fish": "1f3a3", + "diving_mask": "1f93f", + "running_shirt_with_sash": "1f3bd", + "ski": "1f3bf", + "sled": "1f6f7", + "curling_stone": "1f94c", + "dart": "1f3af", + "yo-yo": "1fa80", + "kite": "1fa81", + "8ball": "1f3b1", + "crystal_ball": 
"1f52e", + "magic_wand": "1fa84", + "nazar_amulet": "1f9ff", + "video_game": "1f3ae", + "joystick": "1f579-fe0f", + "slot_machine": "1f3b0", + "game_die": "1f3b2", + "jigsaw": "1f9e9", + "teddy_bear": "1f9f8", + "pinata": "1fa85", + "nesting_dolls": "1fa86", + "spades": "2660-fe0f", + "hearts": "2665-fe0f", + "diamonds": "2666-fe0f", + "clubs": "2663-fe0f", + "chess_pawn": "265f-fe0f", + "black_joker": "1f0cf", + "mahjong": "1f004", + "flower_playing_cards": "1f3b4", + "performing_arts": "1f3ad", + "frame_with_picture": "1f5bc-fe0f", + "framed_picture": "1f5bc-fe0f", + "art": "1f3a8", + "thread": "1f9f5", + "sewing_needle": "1faa1", + "yarn": "1f9f6", + "knot": "1faa2", + "eyeglasses": "1f453", + "dark_sunglasses": "1f576-fe0f", + "goggles": "1f97d", + "lab_coat": "1f97c", + "safety_vest": "1f9ba", + "necktie": "1f454", + "shirt": "1f455", + "tshirt": "1f455", + "jeans": "1f456", + "scarf": "1f9e3", + "gloves": "1f9e4", + "coat": "1f9e5", + "socks": "1f9e6", + "dress": "1f457", + "kimono": "1f458", + "sari": "1f97b", + "one-piece_swimsuit": "1fa71", + "briefs": "1fa72", + "shorts": "1fa73", + "bikini": "1f459", + "womans_clothes": "1f45a", + "purse": "1f45b", + "handbag": "1f45c", + "pouch": "1f45d", + "shopping_bags": "1f6cd-fe0f", + "shopping": "1f6cd-fe0f", + "school_satchel": "1f392", + "thong_sandal": "1fa74", + "mans_shoe": "1f45e", + "shoe": "1f45e", + "athletic_shoe": "1f45f", + "hiking_boot": "1f97e", + "womans_flat_shoe": "1f97f", + "high_heel": "1f460", + "sandal": "1f461", + "ballet_shoes": "1fa70", + "boot": "1f462", + "crown": "1f451", + "womans_hat": "1f452", + "tophat": "1f3a9", + "mortar_board": "1f393", + "billed_cap": "1f9e2", + "military_helmet": "1fa96", + "helmet_with_white_cross": "26d1-fe0f", + "rescue_worker_helmet": "26d1-fe0f", + "prayer_beads": "1f4ff", + "lipstick": "1f484", + "ring": "1f48d", + "gem": "1f48e", + "mute": "1f507", + "speaker": "1f508", + "sound": "1f509", + "loud_sound": "1f50a", + "loudspeaker": "1f4e2", + "mega": 
"1f4e3", + "postal_horn": "1f4ef", + "bell": "1f514", + "no_bell": "1f515", + "musical_score": "1f3bc", + "musical_note": "1f3b5", + "notes": "1f3b6", + "studio_microphone": "1f399-fe0f", + "level_slider": "1f39a-fe0f", + "control_knobs": "1f39b-fe0f", + "microphone": "1f3a4", + "headphones": "1f3a7", + "radio": "1f4fb", + "saxophone": "1f3b7", + "accordion": "1fa97", + "guitar": "1f3b8", + "musical_keyboard": "1f3b9", + "trumpet": "1f3ba", + "violin": "1f3bb", + "banjo": "1fa95", + "drum_with_drumsticks": "1f941", + "drum": "1f941", + "long_drum": "1fa98", + "iphone": "1f4f1", + "calling": "1f4f2", + "phone": "260e-fe0f", + "telephone": "260e-fe0f", + "telephone_receiver": "1f4de", + "pager": "1f4df", + "fax": "1f4e0", + "battery": "1f50b", + "electric_plug": "1f50c", + "computer": "1f4bb", + "desktop_computer": "1f5a5-fe0f", + "printer": "1f5a8-fe0f", + "keyboard": "2328-fe0f", + "three_button_mouse": "1f5b1-fe0f", + "computer_mouse": "1f5b1-fe0f", + "trackball": "1f5b2-fe0f", + "minidisc": "1f4bd", + "floppy_disk": "1f4be", + "cd": "1f4bf", + "dvd": "1f4c0", + "abacus": "1f9ee", + "movie_camera": "1f3a5", + "film_frames": "1f39e-fe0f", + "film_strip": "1f39e-fe0f", + "film_projector": "1f4fd-fe0f", + "clapper": "1f3ac", + "tv": "1f4fa", + "camera": "1f4f7", + "camera_with_flash": "1f4f8", + "camera_flash": "1f4f8", + "video_camera": "1f4f9", + "vhs": "1f4fc", + "mag": "1f50d", + "mag_right": "1f50e", + "candle": "1f56f-fe0f", + "bulb": "1f4a1", + "flashlight": "1f526", + "izakaya_lantern": "1f3ee", + "lantern": "1f3ee", + "diya_lamp": "1fa94", + "notebook_with_decorative_cover": "1f4d4", + "closed_book": "1f4d5", + "book": "1f4d6", + "open_book": "1f4d6", + "green_book": "1f4d7", + "blue_book": "1f4d8", + "orange_book": "1f4d9", + "books": "1f4da", + "notebook": "1f4d3", + "ledger": "1f4d2", + "page_with_curl": "1f4c3", + "scroll": "1f4dc", + "page_facing_up": "1f4c4", + "newspaper": "1f4f0", + "rolled_up_newspaper": "1f5de-fe0f", + "newspaper_roll": 
"1f5de-fe0f", + "bookmark_tabs": "1f4d1", + "bookmark": "1f516", + "label": "1f3f7-fe0f", + "moneybag": "1f4b0", + "coin": "1fa99", + "yen": "1f4b4", + "dollar": "1f4b5", + "euro": "1f4b6", + "pound": "1f4b7", + "money_with_wings": "1f4b8", + "credit_card": "1f4b3", + "receipt": "1f9fe", + "chart": "1f4b9", + "email": "2709-fe0f", + "envelope": "2709-fe0f", + "e-mail": "1f4e7", + "incoming_envelope": "1f4e8", + "envelope_with_arrow": "1f4e9", + "outbox_tray": "1f4e4", + "inbox_tray": "1f4e5", + "package": "1f4e6", + "mailbox": "1f4eb", + "mailbox_closed": "1f4ea", + "mailbox_with_mail": "1f4ec", + "mailbox_with_no_mail": "1f4ed", + "postbox": "1f4ee", + "ballot_box_with_ballot": "1f5f3-fe0f", + "ballot_box": "1f5f3-fe0f", + "pencil2": "270f-fe0f", + "black_nib": "2712-fe0f", + "lower_left_fountain_pen": "1f58b-fe0f", + "fountain_pen": "1f58b-fe0f", + "lower_left_ballpoint_pen": "1f58a-fe0f", + "pen": "1f58a-fe0f", + "lower_left_paintbrush": "1f58c-fe0f", + "paintbrush": "1f58c-fe0f", + "lower_left_crayon": "1f58d-fe0f", + "crayon": "1f58d-fe0f", + "memo": "1f4dd", + "pencil": "1f4dd", + "briefcase": "1f4bc", + "file_folder": "1f4c1", + "open_file_folder": "1f4c2", + "card_index_dividers": "1f5c2-fe0f", + "date": "1f4c5", + "calendar": "1f4c6", + "spiral_note_pad": "1f5d2-fe0f", + "spiral_notepad": "1f5d2-fe0f", + "spiral_calendar_pad": "1f5d3-fe0f", + "spiral_calendar": "1f5d3-fe0f", + "card_index": "1f4c7", + "chart_with_upwards_trend": "1f4c8", + "chart_with_downwards_trend": "1f4c9", + "bar_chart": "1f4ca", + "clipboard": "1f4cb", + "pushpin": "1f4cc", + "round_pushpin": "1f4cd", + "paperclip": "1f4ce", + "linked_paperclips": "1f587-fe0f", + "paperclips": "1f587-fe0f", + "straight_ruler": "1f4cf", + "triangular_ruler": "1f4d0", + "scissors": "2702-fe0f", + "card_file_box": "1f5c3-fe0f", + "file_cabinet": "1f5c4-fe0f", + "wastebasket": "1f5d1-fe0f", + "lock": "1f512", + "unlock": "1f513", + "lock_with_ink_pen": "1f50f", + "closed_lock_with_key": "1f510", + "key": 
"1f511", + "old_key": "1f5dd-fe0f", + "hammer": "1f528", + "axe": "1fa93", + "pick": "26cf-fe0f", + "hammer_and_pick": "2692-fe0f", + "hammer_and_wrench": "1f6e0-fe0f", + "dagger_knife": "1f5e1-fe0f", + "dagger": "1f5e1-fe0f", + "crossed_swords": "2694-fe0f", + "gun": "1f52b", + "boomerang": "1fa83", + "bow_and_arrow": "1f3f9", + "shield": "1f6e1-fe0f", + "carpentry_saw": "1fa9a", + "wrench": "1f527", + "screwdriver": "1fa9b", + "nut_and_bolt": "1f529", + "gear": "2699-fe0f", + "compression": "1f5dc-fe0f", + "clamp": "1f5dc-fe0f", + "scales": "2696-fe0f", + "balance_scale": "2696-fe0f", + "probing_cane": "1f9af", + "link": "1f517", + "chains": "26d3-fe0f", + "hook": "1fa9d", + "toolbox": "1f9f0", + "magnet": "1f9f2", + "ladder": "1fa9c", + "alembic": "2697-fe0f", + "test_tube": "1f9ea", + "petri_dish": "1f9eb", + "dna": "1f9ec", + "microscope": "1f52c", + "telescope": "1f52d", + "satellite_antenna": "1f4e1", + "syringe": "1f489", + "drop_of_blood": "1fa78", + "pill": "1f48a", + "adhesive_bandage": "1fa79", + "stethoscope": "1fa7a", + "door": "1f6aa", + "elevator": "1f6d7", + "mirror": "1fa9e", + "window": "1fa9f", + "bed": "1f6cf-fe0f", + "couch_and_lamp": "1f6cb-fe0f", + "chair": "1fa91", + "toilet": "1f6bd", + "plunger": "1faa0", + "shower": "1f6bf", + "bathtub": "1f6c1", + "mouse_trap": "1faa4", + "razor": "1fa92", + "lotion_bottle": "1f9f4", + "safety_pin": "1f9f7", + "broom": "1f9f9", + "basket": "1f9fa", + "roll_of_paper": "1f9fb", + "bucket": "1faa3", + "soap": "1f9fc", + "toothbrush": "1faa5", + "sponge": "1f9fd", + "fire_extinguisher": "1f9ef", + "shopping_trolley": "1f6d2", + "shopping_cart": "1f6d2", + "smoking": "1f6ac", + "coffin": "26b0-fe0f", + "headstone": "1faa6", + "funeral_urn": "26b1-fe0f", + "moyai": "1f5ff", + "placard": "1faa7", + "atm": "1f3e7", + "put_litter_in_its_place": "1f6ae", + "potable_water": "1f6b0", + "wheelchair": "267f", + "mens": "1f6b9", + "womens": "1f6ba", + "restroom": "1f6bb", + "baby_symbol": "1f6bc", + "wc": "1f6be", + 
"passport_control": "1f6c2", + "customs": "1f6c3", + "baggage_claim": "1f6c4", + "left_luggage": "1f6c5", + "warning": "26a0-fe0f", + "children_crossing": "1f6b8", + "no_entry": "26d4", + "no_entry_sign": "1f6ab", + "no_bicycles": "1f6b3", + "no_smoking": "1f6ad", + "do_not_litter": "1f6af", + "non-potable_water": "1f6b1", + "no_pedestrians": "1f6b7", + "no_mobile_phones": "1f4f5", + "underage": "1f51e", + "radioactive_sign": "2622-fe0f", + "radioactive": "2622-fe0f", + "biohazard_sign": "2623-fe0f", + "biohazard": "2623-fe0f", + "arrow_up": "2b06-fe0f", + "arrow_upper_right": "2197-fe0f", + "arrow_right": "27a1-fe0f", + "arrow_lower_right": "2198-fe0f", + "arrow_down": "2b07-fe0f", + "arrow_lower_left": "2199-fe0f", + "arrow_left": "2b05-fe0f", + "arrow_upper_left": "2196-fe0f", + "arrow_up_down": "2195-fe0f", + "left_right_arrow": "2194-fe0f", + "leftwards_arrow_with_hook": "21a9-fe0f", + "arrow_right_hook": "21aa-fe0f", + "arrow_heading_up": "2934-fe0f", + "arrow_heading_down": "2935-fe0f", + "arrows_clockwise": "1f503", + "arrows_counterclockwise": "1f504", + "back": "1f519", + "end": "1f51a", + "on": "1f51b", + "soon": "1f51c", + "top": "1f51d", + "place_of_worship": "1f6d0", + "atom_symbol": "269b-fe0f", + "om_symbol": "1f549-fe0f", + "om": "1f549-fe0f", + "star_of_david": "2721-fe0f", + "wheel_of_dharma": "2638-fe0f", + "yin_yang": "262f-fe0f", + "latin_cross": "271d-fe0f", + "orthodox_cross": "2626-fe0f", + "star_and_crescent": "262a-fe0f", + "peace_symbol": "262e-fe0f", + "menorah_with_nine_branches": "1f54e", + "menorah": "1f54e", + "six_pointed_star": "1f52f", + "aries": "2648", + "taurus": "2649", + "gemini": "264a", + "cancer": "264b", + "leo": "264c", + "virgo": "264d", + "libra": "264e", + "scorpius": "264f", + "sagittarius": "2650", + "capricorn": "2651", + "aquarius": "2652", + "pisces": "2653", + "ophiuchus": "26ce", + "twisted_rightwards_arrows": "1f500", + "repeat": "1f501", + "repeat_one": "1f502", + "arrow_forward": "25b6-fe0f", + 
"fast_forward": "23e9", + "black_right_pointing_double_triangle_with_vertical_bar": "23ed-fe0f", + "next_track_button": "23ed-fe0f", + "black_right_pointing_triangle_with_double_vertical_bar": "23ef-fe0f", + "play_or_pause_button": "23ef-fe0f", + "arrow_backward": "25c0-fe0f", + "rewind": "23ea", + "black_left_pointing_double_triangle_with_vertical_bar": "23ee-fe0f", + "previous_track_button": "23ee-fe0f", + "arrow_up_small": "1f53c", + "arrow_double_up": "23eb", + "arrow_down_small": "1f53d", + "arrow_double_down": "23ec", + "double_vertical_bar": "23f8-fe0f", + "pause_button": "23f8-fe0f", + "black_square_for_stop": "23f9-fe0f", + "stop_button": "23f9-fe0f", + "black_circle_for_record": "23fa-fe0f", + "record_button": "23fa-fe0f", + "eject": "23cf-fe0f", + "cinema": "1f3a6", + "low_brightness": "1f505", + "high_brightness": "1f506", + "signal_strength": "1f4f6", + "vibration_mode": "1f4f3", + "mobile_phone_off": "1f4f4", + "female_sign": "2640-fe0f", + "male_sign": "2642-fe0f", + "transgender_symbol": "26a7-fe0f", + "heavy_multiplication_x": "2716-fe0f", + "heavy_plus_sign": "2795", + "heavy_minus_sign": "2796", + "heavy_division_sign": "2797", + "infinity": "267e-fe0f", + "bangbang": "203c-fe0f", + "interrobang": "2049-fe0f", + "question": "2753", + "grey_question": "2754", + "grey_exclamation": "2755", + "exclamation": "2757", + "heavy_exclamation_mark": "2757", + "wavy_dash": "3030-fe0f", + "currency_exchange": "1f4b1", + "heavy_dollar_sign": "1f4b2", + "medical_symbol": "2695-fe0f", + "staff_of_aesculapius": "2695-fe0f", + "recycle": "267b-fe0f", + "fleur_de_lis": "269c-fe0f", + "trident": "1f531", + "name_badge": "1f4db", + "beginner": "1f530", + "o": "2b55", + "white_check_mark": "2705", + "ballot_box_with_check": "2611-fe0f", + "heavy_check_mark": "2714-fe0f", + "x": "274c", + "negative_squared_cross_mark": "274e", + "curly_loop": "27b0", + "loop": "27bf", + "part_alternation_mark": "303d-fe0f", + "eight_spoked_asterisk": "2733-fe0f", + 
"eight_pointed_black_star": "2734-fe0f", + "sparkle": "2747-fe0f", + "copyright": "00a9-fe0f", + "registered": "00ae-fe0f", + "tm": "2122-fe0f", + "hash": "0023-fe0f-20e3", + "keycap_star": "002a-fe0f-20e3", + "asterisk": "002a-fe0f-20e3", + "zero": "0030-fe0f-20e3", + "one": "0031-fe0f-20e3", + "two": "0032-fe0f-20e3", + "three": "0033-fe0f-20e3", + "four": "0034-fe0f-20e3", + "five": "0035-fe0f-20e3", + "six": "0036-fe0f-20e3", + "seven": "0037-fe0f-20e3", + "eight": "0038-fe0f-20e3", + "nine": "0039-fe0f-20e3", + "keycap_ten": "1f51f", + "capital_abcd": "1f520", + "abcd": "1f521", + "1234": "1f522", + "symbols": "1f523", + "abc": "1f524", + "a": "1f170-fe0f", + "ab": "1f18e", + "b": "1f171-fe0f", + "cl": "1f191", + "cool": "1f192", + "free": "1f193", + "information_source": "2139-fe0f", + "id": "1f194", + "m": "24c2-fe0f", + "new": "1f195", + "ng": "1f196", + "o2": "1f17e-fe0f", + "ok": "1f197", + "parking": "1f17f-fe0f", + "sos": "1f198", + "up": "1f199", + "vs": "1f19a", + "koko": "1f201", + "sa": "1f202-fe0f", + "u6708": "1f237-fe0f", + "u6709": "1f236", + "u6307": "1f22f", + "ideograph_advantage": "1f250", + "u5272": "1f239", + "u7121": "1f21a", + "u7981": "1f232", + "accept": "1f251", + "u7533": "1f238", + "u5408": "1f234", + "u7a7a": "1f233", + "congratulations": "3297-fe0f", + "secret": "3299-fe0f", + "u55b6": "1f23a", + "u6e80": "1f235", + "red_circle": "1f534", + "large_orange_circle": "1f7e0", + "large_yellow_circle": "1f7e1", + "large_green_circle": "1f7e2", + "large_blue_circle": "1f535", + "large_purple_circle": "1f7e3", + "large_brown_circle": "1f7e4", + "black_circle": "26ab", + "white_circle": "26aa", + "large_red_square": "1f7e5", + "large_orange_square": "1f7e7", + "large_yellow_square": "1f7e8", + "large_green_square": "1f7e9", + "large_blue_square": "1f7e6", + "large_purple_square": "1f7ea", + "large_brown_square": "1f7eb", + "black_large_square": "2b1b", + "white_large_square": "2b1c", + "black_medium_square": "25fc-fe0f", + 
"white_medium_square": "25fb-fe0f", + "black_medium_small_square": "25fe", + "white_medium_small_square": "25fd", + "black_small_square": "25aa-fe0f", + "white_small_square": "25ab-fe0f", + "large_orange_diamond": "1f536", + "large_blue_diamond": "1f537", + "small_orange_diamond": "1f538", + "small_blue_diamond": "1f539", + "small_red_triangle": "1f53a", + "small_red_triangle_down": "1f53b", + "diamond_shape_with_a_dot_inside": "1f4a0", + "radio_button": "1f518", + "white_square_button": "1f533", + "black_square_button": "1f532", + "checkered_flag": "1f3c1", + "triangular_flag_on_post": "1f6a9", + "crossed_flags": "1f38c", + "waving_black_flag": "1f3f4", + "black_flag": "1f3f4", + "waving_white_flag": "1f3f3-fe0f", + "white_flag": "1f3f3-fe0f", + "rainbow-flag": "1f3f3-fe0f-200d-1f308", + "rainbow_flag": "1f3f3-fe0f-200d-1f308", + "transgender_flag": "1f3f3-fe0f-200d-26a7-fe0f", + "pirate_flag": "1f3f4-200d-2620-fe0f", + "flag-ac": "1f1e6-1f1e8", + "flag-ad": "1f1e6-1f1e9", + "andorra": "1f1e6-1f1e9", + "flag-ae": "1f1e6-1f1ea", + "united_arab_emirates": "1f1e6-1f1ea", + "flag-af": "1f1e6-1f1eb", + "afghanistan": "1f1e6-1f1eb", + "flag-ag": "1f1e6-1f1ec", + "antigua_barbuda": "1f1e6-1f1ec", + "flag-ai": "1f1e6-1f1ee", + "anguilla": "1f1e6-1f1ee", + "flag-al": "1f1e6-1f1f1", + "albania": "1f1e6-1f1f1", + "flag-am": "1f1e6-1f1f2", + "armenia": "1f1e6-1f1f2", + "flag-ao": "1f1e6-1f1f4", + "angola": "1f1e6-1f1f4", + "flag-aq": "1f1e6-1f1f6", + "antarctica": "1f1e6-1f1f6", + "flag-ar": "1f1e6-1f1f7", + "argentina": "1f1e6-1f1f7", + "flag-as": "1f1e6-1f1f8", + "american_samoa": "1f1e6-1f1f8", + "flag-at": "1f1e6-1f1f9", + "austria": "1f1e6-1f1f9", + "flag-au": "1f1e6-1f1fa", + "australia": "1f1e6-1f1fa", + "flag-aw": "1f1e6-1f1fc", + "aruba": "1f1e6-1f1fc", + "flag-ax": "1f1e6-1f1fd", + "aland_islands": "1f1e6-1f1fd", + "flag-az": "1f1e6-1f1ff", + "azerbaijan": "1f1e6-1f1ff", + "flag-ba": "1f1e7-1f1e6", + "bosnia_herzegovina": "1f1e7-1f1e6", + "flag-bb": "1f1e7-1f1e7", + 
"barbados": "1f1e7-1f1e7", + "flag-bd": "1f1e7-1f1e9", + "bangladesh": "1f1e7-1f1e9", + "flag-be": "1f1e7-1f1ea", + "belgium": "1f1e7-1f1ea", + "flag-bf": "1f1e7-1f1eb", + "burkina_faso": "1f1e7-1f1eb", + "flag-bg": "1f1e7-1f1ec", + "bulgaria": "1f1e7-1f1ec", + "flag-bh": "1f1e7-1f1ed", + "bahrain": "1f1e7-1f1ed", + "flag-bi": "1f1e7-1f1ee", + "burundi": "1f1e7-1f1ee", + "flag-bj": "1f1e7-1f1ef", + "benin": "1f1e7-1f1ef", + "flag-bl": "1f1e7-1f1f1", + "st_barthelemy": "1f1e7-1f1f1", + "flag-bm": "1f1e7-1f1f2", + "bermuda": "1f1e7-1f1f2", + "flag-bn": "1f1e7-1f1f3", + "brunei": "1f1e7-1f1f3", + "flag-bo": "1f1e7-1f1f4", + "bolivia": "1f1e7-1f1f4", + "flag-bq": "1f1e7-1f1f6", + "caribbean_netherlands": "1f1e7-1f1f6", + "flag-br": "1f1e7-1f1f7", + "brazil": "1f1e7-1f1f7", + "flag-bs": "1f1e7-1f1f8", + "bahamas": "1f1e7-1f1f8", + "flag-bt": "1f1e7-1f1f9", + "bhutan": "1f1e7-1f1f9", + "flag-bv": "1f1e7-1f1fb", + "flag-bw": "1f1e7-1f1fc", + "botswana": "1f1e7-1f1fc", + "flag-by": "1f1e7-1f1fe", + "belarus": "1f1e7-1f1fe", + "flag-bz": "1f1e7-1f1ff", + "belize": "1f1e7-1f1ff", + "flag-ca": "1f1e8-1f1e6", + "ca": "1f1e8-1f1e6", + "canada": "1f1e8-1f1e6", + "flag-cc": "1f1e8-1f1e8", + "cocos_islands": "1f1e8-1f1e8", + "flag-cd": "1f1e8-1f1e9", + "congo_kinshasa": "1f1e8-1f1e9", + "flag-cf": "1f1e8-1f1eb", + "central_african_republic": "1f1e8-1f1eb", + "flag-cg": "1f1e8-1f1ec", + "congo_brazzaville": "1f1e8-1f1ec", + "flag-ch": "1f1e8-1f1ed", + "switzerland": "1f1e8-1f1ed", + "flag-ci": "1f1e8-1f1ee", + "cote_divoire": "1f1e8-1f1ee", + "flag-ck": "1f1e8-1f1f0", + "cook_islands": "1f1e8-1f1f0", + "flag-cl": "1f1e8-1f1f1", + "chile": "1f1e8-1f1f1", + "flag-cm": "1f1e8-1f1f2", + "cameroon": "1f1e8-1f1f2", + "cn": "1f1e8-1f1f3", + "flag-cn": "1f1e8-1f1f3", + "flag-co": "1f1e8-1f1f4", + "colombia": "1f1e8-1f1f4", + "flag-cp": "1f1e8-1f1f5", + "flag-cr": "1f1e8-1f1f7", + "costa_rica": "1f1e8-1f1f7", + "flag-cu": "1f1e8-1f1fa", + "cuba": "1f1e8-1f1fa", + "flag-cv": "1f1e8-1f1fb", + 
"cape_verde": "1f1e8-1f1fb", + "flag-cw": "1f1e8-1f1fc", + "curacao": "1f1e8-1f1fc", + "flag-cx": "1f1e8-1f1fd", + "christmas_island": "1f1e8-1f1fd", + "flag-cy": "1f1e8-1f1fe", + "cyprus": "1f1e8-1f1fe", + "flag-cz": "1f1e8-1f1ff", + "czech_republic": "1f1e8-1f1ff", + "de": "1f1e9-1f1ea", + "flag-de": "1f1e9-1f1ea", + "flag-dg": "1f1e9-1f1ec", + "flag-dj": "1f1e9-1f1ef", + "djibouti": "1f1e9-1f1ef", + "flag-dk": "1f1e9-1f1f0", + "denmark": "1f1e9-1f1f0", + "flag-dm": "1f1e9-1f1f2", + "dominica": "1f1e9-1f1f2", + "flag-do": "1f1e9-1f1f4", + "dominican_republic": "1f1e9-1f1f4", + "flag-dz": "1f1e9-1f1ff", + "algeria": "1f1e9-1f1ff", + "flag-ea": "1f1ea-1f1e6", + "flag-ec": "1f1ea-1f1e8", + "ecuador": "1f1ea-1f1e8", + "flag-ee": "1f1ea-1f1ea", + "estonia": "1f1ea-1f1ea", + "flag-eg": "1f1ea-1f1ec", + "egypt": "1f1ea-1f1ec", + "flag-eh": "1f1ea-1f1ed", + "western_sahara": "1f1ea-1f1ed", + "flag-er": "1f1ea-1f1f7", + "eritrea": "1f1ea-1f1f7", + "es": "1f1ea-1f1f8", + "flag-es": "1f1ea-1f1f8", + "flag-et": "1f1ea-1f1f9", + "ethiopia": "1f1ea-1f1f9", + "flag-eu": "1f1ea-1f1fa", + "eu": "1f1ea-1f1fa", + "european_union": "1f1ea-1f1fa", + "flag-fi": "1f1eb-1f1ee", + "finland": "1f1eb-1f1ee", + "flag-fj": "1f1eb-1f1ef", + "fiji": "1f1eb-1f1ef", + "flag-fk": "1f1eb-1f1f0", + "falkland_islands": "1f1eb-1f1f0", + "flag-fm": "1f1eb-1f1f2", + "micronesia": "1f1eb-1f1f2", + "flag-fo": "1f1eb-1f1f4", + "faroe_islands": "1f1eb-1f1f4", + "fr": "1f1eb-1f1f7", + "flag-fr": "1f1eb-1f1f7", + "flag-ga": "1f1ec-1f1e6", + "gabon": "1f1ec-1f1e6", + "gb": "1f1ec-1f1e7", + "uk": "1f1ec-1f1e7", + "flag-gb": "1f1ec-1f1e7", + "flag-gd": "1f1ec-1f1e9", + "grenada": "1f1ec-1f1e9", + "flag-ge": "1f1ec-1f1ea", + "georgia": "1f1ec-1f1ea", + "flag-gf": "1f1ec-1f1eb", + "french_guiana": "1f1ec-1f1eb", + "flag-gg": "1f1ec-1f1ec", + "guernsey": "1f1ec-1f1ec", + "flag-gh": "1f1ec-1f1ed", + "ghana": "1f1ec-1f1ed", + "flag-gi": "1f1ec-1f1ee", + "gibraltar": "1f1ec-1f1ee", + "flag-gl": "1f1ec-1f1f1", + 
"greenland": "1f1ec-1f1f1", + "flag-gm": "1f1ec-1f1f2", + "gambia": "1f1ec-1f1f2", + "flag-gn": "1f1ec-1f1f3", + "guinea": "1f1ec-1f1f3", + "flag-gp": "1f1ec-1f1f5", + "guadeloupe": "1f1ec-1f1f5", + "flag-gq": "1f1ec-1f1f6", + "equatorial_guinea": "1f1ec-1f1f6", + "flag-gr": "1f1ec-1f1f7", + "greece": "1f1ec-1f1f7", + "flag-gs": "1f1ec-1f1f8", + "south_georgia_south_sandwich_islands": "1f1ec-1f1f8", + "flag-gt": "1f1ec-1f1f9", + "guatemala": "1f1ec-1f1f9", + "flag-gu": "1f1ec-1f1fa", + "guam": "1f1ec-1f1fa", + "flag-gw": "1f1ec-1f1fc", + "guinea_bissau": "1f1ec-1f1fc", + "flag-gy": "1f1ec-1f1fe", + "guyana": "1f1ec-1f1fe", + "flag-hk": "1f1ed-1f1f0", + "hong_kong": "1f1ed-1f1f0", + "flag-hm": "1f1ed-1f1f2", + "flag-hn": "1f1ed-1f1f3", + "honduras": "1f1ed-1f1f3", + "flag-hr": "1f1ed-1f1f7", + "croatia": "1f1ed-1f1f7", + "flag-ht": "1f1ed-1f1f9", + "haiti": "1f1ed-1f1f9", + "flag-hu": "1f1ed-1f1fa", + "hungary": "1f1ed-1f1fa", + "flag-ic": "1f1ee-1f1e8", + "canary_islands": "1f1ee-1f1e8", + "flag-id": "1f1ee-1f1e9", + "indonesia": "1f1ee-1f1e9", + "flag-ie": "1f1ee-1f1ea", + "ireland": "1f1ee-1f1ea", + "flag-il": "1f1ee-1f1f1", + "israel": "1f1ee-1f1f1", + "flag-im": "1f1ee-1f1f2", + "isle_of_man": "1f1ee-1f1f2", + "flag-in": "1f1ee-1f1f3", + "india": "1f1ee-1f1f3", + "flag-io": "1f1ee-1f1f4", + "british_indian_ocean_territory": "1f1ee-1f1f4", + "flag-iq": "1f1ee-1f1f6", + "iraq": "1f1ee-1f1f6", + "flag-ir": "1f1ee-1f1f7", + "iran": "1f1ee-1f1f7", + "flag-is": "1f1ee-1f1f8", + "iceland": "1f1ee-1f1f8", + "it": "1f1ee-1f1f9", + "flag-it": "1f1ee-1f1f9", + "flag-je": "1f1ef-1f1ea", + "jersey": "1f1ef-1f1ea", + "flag-jm": "1f1ef-1f1f2", + "jamaica": "1f1ef-1f1f2", + "flag-jo": "1f1ef-1f1f4", + "jordan": "1f1ef-1f1f4", + "jp": "1f1ef-1f1f5", + "flag-jp": "1f1ef-1f1f5", + "flag-ke": "1f1f0-1f1ea", + "kenya": "1f1f0-1f1ea", + "flag-kg": "1f1f0-1f1ec", + "kyrgyzstan": "1f1f0-1f1ec", + "flag-kh": "1f1f0-1f1ed", + "cambodia": "1f1f0-1f1ed", + "flag-ki": "1f1f0-1f1ee", + 
"kiribati": "1f1f0-1f1ee", + "flag-km": "1f1f0-1f1f2", + "comoros": "1f1f0-1f1f2", + "flag-kn": "1f1f0-1f1f3", + "st_kitts_nevis": "1f1f0-1f1f3", + "flag-kp": "1f1f0-1f1f5", + "north_korea": "1f1f0-1f1f5", + "kr": "1f1f0-1f1f7", + "flag-kr": "1f1f0-1f1f7", + "flag-kw": "1f1f0-1f1fc", + "kuwait": "1f1f0-1f1fc", + "flag-ky": "1f1f0-1f1fe", + "cayman_islands": "1f1f0-1f1fe", + "flag-kz": "1f1f0-1f1ff", + "kazakhstan": "1f1f0-1f1ff", + "flag-la": "1f1f1-1f1e6", + "laos": "1f1f1-1f1e6", + "flag-lb": "1f1f1-1f1e7", + "lebanon": "1f1f1-1f1e7", + "flag-lc": "1f1f1-1f1e8", + "st_lucia": "1f1f1-1f1e8", + "flag-li": "1f1f1-1f1ee", + "liechtenstein": "1f1f1-1f1ee", + "flag-lk": "1f1f1-1f1f0", + "sri_lanka": "1f1f1-1f1f0", + "flag-lr": "1f1f1-1f1f7", + "liberia": "1f1f1-1f1f7", + "flag-ls": "1f1f1-1f1f8", + "lesotho": "1f1f1-1f1f8", + "flag-lt": "1f1f1-1f1f9", + "lithuania": "1f1f1-1f1f9", + "flag-lu": "1f1f1-1f1fa", + "luxembourg": "1f1f1-1f1fa", + "flag-lv": "1f1f1-1f1fb", + "latvia": "1f1f1-1f1fb", + "flag-ly": "1f1f1-1f1fe", + "libya": "1f1f1-1f1fe", + "flag-ma": "1f1f2-1f1e6", + "morocco": "1f1f2-1f1e6", + "flag-mc": "1f1f2-1f1e8", + "monaco": "1f1f2-1f1e8", + "flag-md": "1f1f2-1f1e9", + "moldova": "1f1f2-1f1e9", + "flag-me": "1f1f2-1f1ea", + "montenegro": "1f1f2-1f1ea", + "flag-mf": "1f1f2-1f1eb", + "flag-mg": "1f1f2-1f1ec", + "madagascar": "1f1f2-1f1ec", + "flag-mh": "1f1f2-1f1ed", + "marshall_islands": "1f1f2-1f1ed", + "flag-mk": "1f1f2-1f1f0", + "macedonia": "1f1f2-1f1f0", + "flag-ml": "1f1f2-1f1f1", + "mali": "1f1f2-1f1f1", + "flag-mm": "1f1f2-1f1f2", + "myanmar": "1f1f2-1f1f2", + "flag-mn": "1f1f2-1f1f3", + "mongolia": "1f1f2-1f1f3", + "flag-mo": "1f1f2-1f1f4", + "macau": "1f1f2-1f1f4", + "flag-mp": "1f1f2-1f1f5", + "northern_mariana_islands": "1f1f2-1f1f5", + "flag-mq": "1f1f2-1f1f6", + "martinique": "1f1f2-1f1f6", + "flag-mr": "1f1f2-1f1f7", + "mauritania": "1f1f2-1f1f7", + "flag-ms": "1f1f2-1f1f8", + "montserrat": "1f1f2-1f1f8", + "flag-mt": "1f1f2-1f1f9", + 
"malta": "1f1f2-1f1f9", + "flag-mu": "1f1f2-1f1fa", + "mauritius": "1f1f2-1f1fa", + "flag-mv": "1f1f2-1f1fb", + "maldives": "1f1f2-1f1fb", + "flag-mw": "1f1f2-1f1fc", + "malawi": "1f1f2-1f1fc", + "flag-mx": "1f1f2-1f1fd", + "mexico": "1f1f2-1f1fd", + "flag-my": "1f1f2-1f1fe", + "malaysia": "1f1f2-1f1fe", + "flag-mz": "1f1f2-1f1ff", + "mozambique": "1f1f2-1f1ff", + "flag-na": "1f1f3-1f1e6", + "namibia": "1f1f3-1f1e6", + "flag-nc": "1f1f3-1f1e8", + "new_caledonia": "1f1f3-1f1e8", + "flag-ne": "1f1f3-1f1ea", + "niger": "1f1f3-1f1ea", + "flag-nf": "1f1f3-1f1eb", + "norfolk_island": "1f1f3-1f1eb", + "flag-ng": "1f1f3-1f1ec", + "nigeria": "1f1f3-1f1ec", + "flag-ni": "1f1f3-1f1ee", + "nicaragua": "1f1f3-1f1ee", + "flag-nl": "1f1f3-1f1f1", + "netherlands": "1f1f3-1f1f1", + "flag-no": "1f1f3-1f1f4", + "norway": "1f1f3-1f1f4", + "flag-np": "1f1f3-1f1f5", + "nepal": "1f1f3-1f1f5", + "flag-nr": "1f1f3-1f1f7", + "nauru": "1f1f3-1f1f7", + "flag-nu": "1f1f3-1f1fa", + "niue": "1f1f3-1f1fa", + "flag-nz": "1f1f3-1f1ff", + "new_zealand": "1f1f3-1f1ff", + "flag-om": "1f1f4-1f1f2", + "oman": "1f1f4-1f1f2", + "flag-pa": "1f1f5-1f1e6", + "panama": "1f1f5-1f1e6", + "flag-pe": "1f1f5-1f1ea", + "peru": "1f1f5-1f1ea", + "flag-pf": "1f1f5-1f1eb", + "french_polynesia": "1f1f5-1f1eb", + "flag-pg": "1f1f5-1f1ec", + "papua_new_guinea": "1f1f5-1f1ec", + "flag-ph": "1f1f5-1f1ed", + "philippines": "1f1f5-1f1ed", + "flag-pk": "1f1f5-1f1f0", + "pakistan": "1f1f5-1f1f0", + "pk": "1f1f5-1f1f0", + "flag-pl": "1f1f5-1f1f1", + "poland": "1f1f5-1f1f1", + "flag-pm": "1f1f5-1f1f2", + "st_pierre_miquelon": "1f1f5-1f1f2", + "flag-pn": "1f1f5-1f1f3", + "pitcairn_islands": "1f1f5-1f1f3", + "flag-pr": "1f1f5-1f1f7", + "puerto_rico": "1f1f5-1f1f7", + "flag-ps": "1f1f5-1f1f8", + "palestinian_territories": "1f1f5-1f1f8", + "flag-pt": "1f1f5-1f1f9", + "portugal": "1f1f5-1f1f9", + "flag-pw": "1f1f5-1f1fc", + "palau": "1f1f5-1f1fc", + "flag-py": "1f1f5-1f1fe", + "paraguay": "1f1f5-1f1fe", + "flag-qa": "1f1f6-1f1e6", + 
"qatar": "1f1f6-1f1e6", + "flag-re": "1f1f7-1f1ea", + "reunion": "1f1f7-1f1ea", + "flag-ro": "1f1f7-1f1f4", + "romania": "1f1f7-1f1f4", + "flag-rs": "1f1f7-1f1f8", + "serbia": "1f1f7-1f1f8", + "ru": "1f1f7-1f1fa", + "flag-ru": "1f1f7-1f1fa", + "flag-rw": "1f1f7-1f1fc", + "rwanda": "1f1f7-1f1fc", + "flag-sa": "1f1f8-1f1e6", + "saudi_arabia": "1f1f8-1f1e6", + "flag-sb": "1f1f8-1f1e7", + "solomon_islands": "1f1f8-1f1e7", + "flag-sc": "1f1f8-1f1e8", + "seychelles": "1f1f8-1f1e8", + "flag-sd": "1f1f8-1f1e9", + "sudan": "1f1f8-1f1e9", + "flag-se": "1f1f8-1f1ea", + "sweden": "1f1f8-1f1ea", + "flag-sg": "1f1f8-1f1ec", + "singapore": "1f1f8-1f1ec", + "flag-sh": "1f1f8-1f1ed", + "st_helena": "1f1f8-1f1ed", + "flag-si": "1f1f8-1f1ee", + "slovenia": "1f1f8-1f1ee", + "flag-sj": "1f1f8-1f1ef", + "flag-sk": "1f1f8-1f1f0", + "slovakia": "1f1f8-1f1f0", + "flag-sl": "1f1f8-1f1f1", + "sierra_leone": "1f1f8-1f1f1", + "flag-sm": "1f1f8-1f1f2", + "san_marino": "1f1f8-1f1f2", + "flag-sn": "1f1f8-1f1f3", + "senegal": "1f1f8-1f1f3", + "flag-so": "1f1f8-1f1f4", + "somalia": "1f1f8-1f1f4", + "flag-sr": "1f1f8-1f1f7", + "suriname": "1f1f8-1f1f7", + "flag-ss": "1f1f8-1f1f8", + "south_sudan": "1f1f8-1f1f8", + "flag-st": "1f1f8-1f1f9", + "sao_tome_principe": "1f1f8-1f1f9", + "flag-sv": "1f1f8-1f1fb", + "el_salvador": "1f1f8-1f1fb", + "flag-sx": "1f1f8-1f1fd", + "sint_maarten": "1f1f8-1f1fd", + "flag-sy": "1f1f8-1f1fe", + "syria": "1f1f8-1f1fe", + "flag-sz": "1f1f8-1f1ff", + "swaziland": "1f1f8-1f1ff", + "flag-ta": "1f1f9-1f1e6", + "flag-tc": "1f1f9-1f1e8", + "turks_caicos_islands": "1f1f9-1f1e8", + "flag-td": "1f1f9-1f1e9", + "chad": "1f1f9-1f1e9", + "flag-tf": "1f1f9-1f1eb", + "french_southern_territories": "1f1f9-1f1eb", + "flag-tg": "1f1f9-1f1ec", + "togo": "1f1f9-1f1ec", + "flag-th": "1f1f9-1f1ed", + "thailand": "1f1f9-1f1ed", + "flag-tj": "1f1f9-1f1ef", + "tajikistan": "1f1f9-1f1ef", + "flag-tk": "1f1f9-1f1f0", + "tokelau": "1f1f9-1f1f0", + "flag-tl": "1f1f9-1f1f1", + "timor_leste": 
"1f1f9-1f1f1", + "flag-tm": "1f1f9-1f1f2", + "turkmenistan": "1f1f9-1f1f2", + "flag-tn": "1f1f9-1f1f3", + "tunisia": "1f1f9-1f1f3", + "flag-to": "1f1f9-1f1f4", + "tonga": "1f1f9-1f1f4", + "flag-tr": "1f1f9-1f1f7", + "tr": "1f1f9-1f1f7", + "flag-tt": "1f1f9-1f1f9", + "trinidad_tobago": "1f1f9-1f1f9", + "flag-tv": "1f1f9-1f1fb", + "tuvalu": "1f1f9-1f1fb", + "flag-tw": "1f1f9-1f1fc", + "taiwan": "1f1f9-1f1fc", + "flag-tz": "1f1f9-1f1ff", + "tanzania": "1f1f9-1f1ff", + "flag-ua": "1f1fa-1f1e6", + "ukraine": "1f1fa-1f1e6", + "flag-ug": "1f1fa-1f1ec", + "uganda": "1f1fa-1f1ec", + "flag-um": "1f1fa-1f1f2", + "flag-un": "1f1fa-1f1f3", + "us": "1f1fa-1f1f8", + "flag-us": "1f1fa-1f1f8", + "flag-uy": "1f1fa-1f1fe", + "uruguay": "1f1fa-1f1fe", + "flag-uz": "1f1fa-1f1ff", + "uzbekistan": "1f1fa-1f1ff", + "flag-va": "1f1fb-1f1e6", + "vatican_city": "1f1fb-1f1e6", + "flag-vc": "1f1fb-1f1e8", + "st_vincent_grenadines": "1f1fb-1f1e8", + "flag-ve": "1f1fb-1f1ea", + "venezuela": "1f1fb-1f1ea", + "flag-vg": "1f1fb-1f1ec", + "british_virgin_islands": "1f1fb-1f1ec", + "flag-vi": "1f1fb-1f1ee", + "us_virgin_islands": "1f1fb-1f1ee", + "flag-vn": "1f1fb-1f1f3", + "vietnam": "1f1fb-1f1f3", + "flag-vu": "1f1fb-1f1fa", + "vanuatu": "1f1fb-1f1fa", + "flag-wf": "1f1fc-1f1eb", + "wallis_futuna": "1f1fc-1f1eb", + "flag-ws": "1f1fc-1f1f8", + "samoa": "1f1fc-1f1f8", + "flag-xk": "1f1fd-1f1f0", + "kosovo": "1f1fd-1f1f0", + "flag-ye": "1f1fe-1f1ea", + "yemen": "1f1fe-1f1ea", + "flag-yt": "1f1fe-1f1f9", + "mayotte": "1f1fe-1f1f9", + "flag-za": "1f1ff-1f1e6", + "south_africa": "1f1ff-1f1e6", + "za": "1f1ff-1f1e6", + "flag-zm": "1f1ff-1f1f2", + "zambia": "1f1ff-1f1f2", + "flag-zw": "1f1ff-1f1fc", + "zimbabwe": "1f1ff-1f1fc", + "flag-england": "1f3f4-e0067-e0062-e0065-e006e-e0067-e007f", + "flag-scotland": "1f3f4-e0067-e0062-e0073-e0063-e0074-e007f", + "flag-wales": "1f3f4-e0067-e0062-e0077-e006c-e0073-e007f", + "santa_light_skin_tone": "1f385-1f3fb", + "santa_medium_light_skin_tone": "1f385-1f3fc", + 
"santa_medium_skin_tone": "1f385-1f3fd", + "santa_medium_dark_skin_tone": "1f385-1f3fe", + "santa_dark_skin_tone": "1f385-1f3ff", + "snowboarder_light_skin_tone": "1f3c2-1f3fb", + "snowboarder_medium_light_skin_tone": "1f3c2-1f3fc", + "snowboarder_medium_skin_tone": "1f3c2-1f3fd", + "snowboarder_medium_dark_skin_tone": "1f3c2-1f3fe", + "snowboarder_dark_skin_tone": "1f3c2-1f3ff", + "woman-running_light_skin_tone": "1f3c3-1f3fb-200d-2640-fe0f", + "running_woman_light_skin_tone": "1f3c3-1f3fb-200d-2640-fe0f", + "woman-running_medium_light_skin_tone": "1f3c3-1f3fc-200d-2640-fe0f", + "running_woman_medium_light_skin_tone": "1f3c3-1f3fc-200d-2640-fe0f", + "woman-running_medium_skin_tone": "1f3c3-1f3fd-200d-2640-fe0f", + "running_woman_medium_skin_tone": "1f3c3-1f3fd-200d-2640-fe0f", + "woman-running_medium_dark_skin_tone": "1f3c3-1f3fe-200d-2640-fe0f", + "running_woman_medium_dark_skin_tone": "1f3c3-1f3fe-200d-2640-fe0f", + "woman-running_dark_skin_tone": "1f3c3-1f3ff-200d-2640-fe0f", + "running_woman_dark_skin_tone": "1f3c3-1f3ff-200d-2640-fe0f", + "man-running_light_skin_tone": "1f3c3-1f3fb-200d-2642-fe0f", + "running_man_light_skin_tone": "1f3c3-1f3fb-200d-2642-fe0f", + "man-running_medium_light_skin_tone": "1f3c3-1f3fc-200d-2642-fe0f", + "running_man_medium_light_skin_tone": "1f3c3-1f3fc-200d-2642-fe0f", + "man-running_medium_skin_tone": "1f3c3-1f3fd-200d-2642-fe0f", + "running_man_medium_skin_tone": "1f3c3-1f3fd-200d-2642-fe0f", + "man-running_medium_dark_skin_tone": "1f3c3-1f3fe-200d-2642-fe0f", + "running_man_medium_dark_skin_tone": "1f3c3-1f3fe-200d-2642-fe0f", + "man-running_dark_skin_tone": "1f3c3-1f3ff-200d-2642-fe0f", + "running_man_dark_skin_tone": "1f3c3-1f3ff-200d-2642-fe0f", + "runner_light_skin_tone": "1f3c3-1f3fb", + "running_light_skin_tone": "1f3c3-1f3fb", + "runner_medium_light_skin_tone": "1f3c3-1f3fc", + "running_medium_light_skin_tone": "1f3c3-1f3fc", + "runner_medium_skin_tone": "1f3c3-1f3fd", + "running_medium_skin_tone": "1f3c3-1f3fd", + 
"runner_medium_dark_skin_tone": "1f3c3-1f3fe", + "running_medium_dark_skin_tone": "1f3c3-1f3fe", + "runner_dark_skin_tone": "1f3c3-1f3ff", + "running_dark_skin_tone": "1f3c3-1f3ff", + "woman-surfing_light_skin_tone": "1f3c4-1f3fb-200d-2640-fe0f", + "surfing_woman_light_skin_tone": "1f3c4-1f3fb-200d-2640-fe0f", + "woman-surfing_medium_light_skin_tone": "1f3c4-1f3fc-200d-2640-fe0f", + "surfing_woman_medium_light_skin_tone": "1f3c4-1f3fc-200d-2640-fe0f", + "woman-surfing_medium_skin_tone": "1f3c4-1f3fd-200d-2640-fe0f", + "surfing_woman_medium_skin_tone": "1f3c4-1f3fd-200d-2640-fe0f", + "woman-surfing_medium_dark_skin_tone": "1f3c4-1f3fe-200d-2640-fe0f", + "surfing_woman_medium_dark_skin_tone": "1f3c4-1f3fe-200d-2640-fe0f", + "woman-surfing_dark_skin_tone": "1f3c4-1f3ff-200d-2640-fe0f", + "surfing_woman_dark_skin_tone": "1f3c4-1f3ff-200d-2640-fe0f", + "man-surfing_light_skin_tone": "1f3c4-1f3fb-200d-2642-fe0f", + "surfing_man_light_skin_tone": "1f3c4-1f3fb-200d-2642-fe0f", + "man-surfing_medium_light_skin_tone": "1f3c4-1f3fc-200d-2642-fe0f", + "surfing_man_medium_light_skin_tone": "1f3c4-1f3fc-200d-2642-fe0f", + "man-surfing_medium_skin_tone": "1f3c4-1f3fd-200d-2642-fe0f", + "surfing_man_medium_skin_tone": "1f3c4-1f3fd-200d-2642-fe0f", + "man-surfing_medium_dark_skin_tone": "1f3c4-1f3fe-200d-2642-fe0f", + "surfing_man_medium_dark_skin_tone": "1f3c4-1f3fe-200d-2642-fe0f", + "man-surfing_dark_skin_tone": "1f3c4-1f3ff-200d-2642-fe0f", + "surfing_man_dark_skin_tone": "1f3c4-1f3ff-200d-2642-fe0f", + "surfer_light_skin_tone": "1f3c4-1f3fb", + "surfer_medium_light_skin_tone": "1f3c4-1f3fc", + "surfer_medium_skin_tone": "1f3c4-1f3fd", + "surfer_medium_dark_skin_tone": "1f3c4-1f3fe", + "surfer_dark_skin_tone": "1f3c4-1f3ff", + "horse_racing_light_skin_tone": "1f3c7-1f3fb", + "horse_racing_medium_light_skin_tone": "1f3c7-1f3fc", + "horse_racing_medium_skin_tone": "1f3c7-1f3fd", + "horse_racing_medium_dark_skin_tone": "1f3c7-1f3fe", + "horse_racing_dark_skin_tone": "1f3c7-1f3ff", 
+ "woman-swimming_light_skin_tone": "1f3ca-1f3fb-200d-2640-fe0f", + "swimming_woman_light_skin_tone": "1f3ca-1f3fb-200d-2640-fe0f", + "woman-swimming_medium_light_skin_tone": "1f3ca-1f3fc-200d-2640-fe0f", + "swimming_woman_medium_light_skin_tone": "1f3ca-1f3fc-200d-2640-fe0f", + "woman-swimming_medium_skin_tone": "1f3ca-1f3fd-200d-2640-fe0f", + "swimming_woman_medium_skin_tone": "1f3ca-1f3fd-200d-2640-fe0f", + "woman-swimming_medium_dark_skin_tone": "1f3ca-1f3fe-200d-2640-fe0f", + "swimming_woman_medium_dark_skin_tone": "1f3ca-1f3fe-200d-2640-fe0f", + "woman-swimming_dark_skin_tone": "1f3ca-1f3ff-200d-2640-fe0f", + "swimming_woman_dark_skin_tone": "1f3ca-1f3ff-200d-2640-fe0f", + "man-swimming_light_skin_tone": "1f3ca-1f3fb-200d-2642-fe0f", + "swimming_man_light_skin_tone": "1f3ca-1f3fb-200d-2642-fe0f", + "man-swimming_medium_light_skin_tone": "1f3ca-1f3fc-200d-2642-fe0f", + "swimming_man_medium_light_skin_tone": "1f3ca-1f3fc-200d-2642-fe0f", + "man-swimming_medium_skin_tone": "1f3ca-1f3fd-200d-2642-fe0f", + "swimming_man_medium_skin_tone": "1f3ca-1f3fd-200d-2642-fe0f", + "man-swimming_medium_dark_skin_tone": "1f3ca-1f3fe-200d-2642-fe0f", + "swimming_man_medium_dark_skin_tone": "1f3ca-1f3fe-200d-2642-fe0f", + "man-swimming_dark_skin_tone": "1f3ca-1f3ff-200d-2642-fe0f", + "swimming_man_dark_skin_tone": "1f3ca-1f3ff-200d-2642-fe0f", + "swimmer_light_skin_tone": "1f3ca-1f3fb", + "swimmer_medium_light_skin_tone": "1f3ca-1f3fc", + "swimmer_medium_skin_tone": "1f3ca-1f3fd", + "swimmer_medium_dark_skin_tone": "1f3ca-1f3fe", + "swimmer_dark_skin_tone": "1f3ca-1f3ff", + "woman-lifting-weights_light_skin_tone": "1f3cb-1f3fb-200d-2640-fe0f", + "weight_lifting_woman_light_skin_tone": "1f3cb-1f3fb-200d-2640-fe0f", + "woman-lifting-weights_medium_light_skin_tone": "1f3cb-1f3fc-200d-2640-fe0f", + "weight_lifting_woman_medium_light_skin_tone": "1f3cb-1f3fc-200d-2640-fe0f", + "woman-lifting-weights_medium_skin_tone": "1f3cb-1f3fd-200d-2640-fe0f", + 
"weight_lifting_woman_medium_skin_tone": "1f3cb-1f3fd-200d-2640-fe0f", + "woman-lifting-weights_medium_dark_skin_tone": "1f3cb-1f3fe-200d-2640-fe0f", + "weight_lifting_woman_medium_dark_skin_tone": "1f3cb-1f3fe-200d-2640-fe0f", + "woman-lifting-weights_dark_skin_tone": "1f3cb-1f3ff-200d-2640-fe0f", + "weight_lifting_woman_dark_skin_tone": "1f3cb-1f3ff-200d-2640-fe0f", + "man-lifting-weights_light_skin_tone": "1f3cb-1f3fb-200d-2642-fe0f", + "weight_lifting_man_light_skin_tone": "1f3cb-1f3fb-200d-2642-fe0f", + "man-lifting-weights_medium_light_skin_tone": "1f3cb-1f3fc-200d-2642-fe0f", + "weight_lifting_man_medium_light_skin_tone": "1f3cb-1f3fc-200d-2642-fe0f", + "man-lifting-weights_medium_skin_tone": "1f3cb-1f3fd-200d-2642-fe0f", + "weight_lifting_man_medium_skin_tone": "1f3cb-1f3fd-200d-2642-fe0f", + "man-lifting-weights_medium_dark_skin_tone": "1f3cb-1f3fe-200d-2642-fe0f", + "weight_lifting_man_medium_dark_skin_tone": "1f3cb-1f3fe-200d-2642-fe0f", + "man-lifting-weights_dark_skin_tone": "1f3cb-1f3ff-200d-2642-fe0f", + "weight_lifting_man_dark_skin_tone": "1f3cb-1f3ff-200d-2642-fe0f", + "weight_lifter_light_skin_tone": "1f3cb-1f3fb", + "weight_lifter_medium_light_skin_tone": "1f3cb-1f3fc", + "weight_lifter_medium_skin_tone": "1f3cb-1f3fd", + "weight_lifter_medium_dark_skin_tone": "1f3cb-1f3fe", + "weight_lifter_dark_skin_tone": "1f3cb-1f3ff", + "woman-golfing_light_skin_tone": "1f3cc-1f3fb-200d-2640-fe0f", + "golfing_woman_light_skin_tone": "1f3cc-1f3fb-200d-2640-fe0f", + "woman-golfing_medium_light_skin_tone": "1f3cc-1f3fc-200d-2640-fe0f", + "golfing_woman_medium_light_skin_tone": "1f3cc-1f3fc-200d-2640-fe0f", + "woman-golfing_medium_skin_tone": "1f3cc-1f3fd-200d-2640-fe0f", + "golfing_woman_medium_skin_tone": "1f3cc-1f3fd-200d-2640-fe0f", + "woman-golfing_medium_dark_skin_tone": "1f3cc-1f3fe-200d-2640-fe0f", + "golfing_woman_medium_dark_skin_tone": "1f3cc-1f3fe-200d-2640-fe0f", + "woman-golfing_dark_skin_tone": "1f3cc-1f3ff-200d-2640-fe0f", + 
"golfing_woman_dark_skin_tone": "1f3cc-1f3ff-200d-2640-fe0f", + "man-golfing_light_skin_tone": "1f3cc-1f3fb-200d-2642-fe0f", + "golfing_man_light_skin_tone": "1f3cc-1f3fb-200d-2642-fe0f", + "man-golfing_medium_light_skin_tone": "1f3cc-1f3fc-200d-2642-fe0f", + "golfing_man_medium_light_skin_tone": "1f3cc-1f3fc-200d-2642-fe0f", + "man-golfing_medium_skin_tone": "1f3cc-1f3fd-200d-2642-fe0f", + "golfing_man_medium_skin_tone": "1f3cc-1f3fd-200d-2642-fe0f", + "man-golfing_medium_dark_skin_tone": "1f3cc-1f3fe-200d-2642-fe0f", + "golfing_man_medium_dark_skin_tone": "1f3cc-1f3fe-200d-2642-fe0f", + "man-golfing_dark_skin_tone": "1f3cc-1f3ff-200d-2642-fe0f", + "golfing_man_dark_skin_tone": "1f3cc-1f3ff-200d-2642-fe0f", + "golfer_light_skin_tone": "1f3cc-1f3fb", + "golfer_medium_light_skin_tone": "1f3cc-1f3fc", + "golfer_medium_skin_tone": "1f3cc-1f3fd", + "golfer_medium_dark_skin_tone": "1f3cc-1f3fe", + "golfer_dark_skin_tone": "1f3cc-1f3ff", + "ear_light_skin_tone": "1f442-1f3fb", + "ear_medium_light_skin_tone": "1f442-1f3fc", + "ear_medium_skin_tone": "1f442-1f3fd", + "ear_medium_dark_skin_tone": "1f442-1f3fe", + "ear_dark_skin_tone": "1f442-1f3ff", + "nose_light_skin_tone": "1f443-1f3fb", + "nose_medium_light_skin_tone": "1f443-1f3fc", + "nose_medium_skin_tone": "1f443-1f3fd", + "nose_medium_dark_skin_tone": "1f443-1f3fe", + "nose_dark_skin_tone": "1f443-1f3ff", + "point_up_2_light_skin_tone": "1f446-1f3fb", + "point_up_2_medium_light_skin_tone": "1f446-1f3fc", + "point_up_2_medium_skin_tone": "1f446-1f3fd", + "point_up_2_medium_dark_skin_tone": "1f446-1f3fe", + "point_up_2_dark_skin_tone": "1f446-1f3ff", + "point_down_light_skin_tone": "1f447-1f3fb", + "point_down_medium_light_skin_tone": "1f447-1f3fc", + "point_down_medium_skin_tone": "1f447-1f3fd", + "point_down_medium_dark_skin_tone": "1f447-1f3fe", + "point_down_dark_skin_tone": "1f447-1f3ff", + "point_left_light_skin_tone": "1f448-1f3fb", + "point_left_medium_light_skin_tone": "1f448-1f3fc", + 
"point_left_medium_skin_tone": "1f448-1f3fd", + "point_left_medium_dark_skin_tone": "1f448-1f3fe", + "point_left_dark_skin_tone": "1f448-1f3ff", + "point_right_light_skin_tone": "1f449-1f3fb", + "point_right_medium_light_skin_tone": "1f449-1f3fc", + "point_right_medium_skin_tone": "1f449-1f3fd", + "point_right_medium_dark_skin_tone": "1f449-1f3fe", + "point_right_dark_skin_tone": "1f449-1f3ff", + "facepunch_light_skin_tone": "1f44a-1f3fb", + "punch_light_skin_tone": "1f44a-1f3fb", + "fist_oncoming_light_skin_tone": "1f44a-1f3fb", + "facepunch_medium_light_skin_tone": "1f44a-1f3fc", + "punch_medium_light_skin_tone": "1f44a-1f3fc", + "fist_oncoming_medium_light_skin_tone": "1f44a-1f3fc", + "facepunch_medium_skin_tone": "1f44a-1f3fd", + "punch_medium_skin_tone": "1f44a-1f3fd", + "fist_oncoming_medium_skin_tone": "1f44a-1f3fd", + "facepunch_medium_dark_skin_tone": "1f44a-1f3fe", + "punch_medium_dark_skin_tone": "1f44a-1f3fe", + "fist_oncoming_medium_dark_skin_tone": "1f44a-1f3fe", + "facepunch_dark_skin_tone": "1f44a-1f3ff", + "punch_dark_skin_tone": "1f44a-1f3ff", + "fist_oncoming_dark_skin_tone": "1f44a-1f3ff", + "wave_light_skin_tone": "1f44b-1f3fb", + "wave_medium_light_skin_tone": "1f44b-1f3fc", + "wave_medium_skin_tone": "1f44b-1f3fd", + "wave_medium_dark_skin_tone": "1f44b-1f3fe", + "wave_dark_skin_tone": "1f44b-1f3ff", + "ok_hand_light_skin_tone": "1f44c-1f3fb", + "ok_hand_medium_light_skin_tone": "1f44c-1f3fc", + "ok_hand_medium_skin_tone": "1f44c-1f3fd", + "ok_hand_medium_dark_skin_tone": "1f44c-1f3fe", + "ok_hand_dark_skin_tone": "1f44c-1f3ff", + "+1_light_skin_tone": "1f44d-1f3fb", + "thumbsup_light_skin_tone": "1f44d-1f3fb", + "+1_medium_light_skin_tone": "1f44d-1f3fc", + "thumbsup_medium_light_skin_tone": "1f44d-1f3fc", + "+1_medium_skin_tone": "1f44d-1f3fd", + "thumbsup_medium_skin_tone": "1f44d-1f3fd", + "+1_medium_dark_skin_tone": "1f44d-1f3fe", + "thumbsup_medium_dark_skin_tone": "1f44d-1f3fe", + "+1_dark_skin_tone": "1f44d-1f3ff", + 
"thumbsup_dark_skin_tone": "1f44d-1f3ff", + "-1_light_skin_tone": "1f44e-1f3fb", + "thumbsdown_light_skin_tone": "1f44e-1f3fb", + "-1_medium_light_skin_tone": "1f44e-1f3fc", + "thumbsdown_medium_light_skin_tone": "1f44e-1f3fc", + "-1_medium_skin_tone": "1f44e-1f3fd", + "thumbsdown_medium_skin_tone": "1f44e-1f3fd", + "-1_medium_dark_skin_tone": "1f44e-1f3fe", + "thumbsdown_medium_dark_skin_tone": "1f44e-1f3fe", + "-1_dark_skin_tone": "1f44e-1f3ff", + "thumbsdown_dark_skin_tone": "1f44e-1f3ff", + "clap_light_skin_tone": "1f44f-1f3fb", + "clap_medium_light_skin_tone": "1f44f-1f3fc", + "clap_medium_skin_tone": "1f44f-1f3fd", + "clap_medium_dark_skin_tone": "1f44f-1f3fe", + "clap_dark_skin_tone": "1f44f-1f3ff", + "open_hands_light_skin_tone": "1f450-1f3fb", + "open_hands_medium_light_skin_tone": "1f450-1f3fc", + "open_hands_medium_skin_tone": "1f450-1f3fd", + "open_hands_medium_dark_skin_tone": "1f450-1f3fe", + "open_hands_dark_skin_tone": "1f450-1f3ff", + "boy_light_skin_tone": "1f466-1f3fb", + "boy_medium_light_skin_tone": "1f466-1f3fc", + "boy_medium_skin_tone": "1f466-1f3fd", + "boy_medium_dark_skin_tone": "1f466-1f3fe", + "boy_dark_skin_tone": "1f466-1f3ff", + "girl_light_skin_tone": "1f467-1f3fb", + "girl_medium_light_skin_tone": "1f467-1f3fc", + "girl_medium_skin_tone": "1f467-1f3fd", + "girl_medium_dark_skin_tone": "1f467-1f3fe", + "girl_dark_skin_tone": "1f467-1f3ff", + "male-farmer_light_skin_tone": "1f468-1f3fb-200d-1f33e", + "man_farmer_light_skin_tone": "1f468-1f3fb-200d-1f33e", + "male-farmer_medium_light_skin_tone": "1f468-1f3fc-200d-1f33e", + "man_farmer_medium_light_skin_tone": "1f468-1f3fc-200d-1f33e", + "male-farmer_medium_skin_tone": "1f468-1f3fd-200d-1f33e", + "man_farmer_medium_skin_tone": "1f468-1f3fd-200d-1f33e", + "male-farmer_medium_dark_skin_tone": "1f468-1f3fe-200d-1f33e", + "man_farmer_medium_dark_skin_tone": "1f468-1f3fe-200d-1f33e", + "male-farmer_dark_skin_tone": "1f468-1f3ff-200d-1f33e", + "man_farmer_dark_skin_tone": 
"1f468-1f3ff-200d-1f33e", + "male-cook_light_skin_tone": "1f468-1f3fb-200d-1f373", + "man_cook_light_skin_tone": "1f468-1f3fb-200d-1f373", + "male-cook_medium_light_skin_tone": "1f468-1f3fc-200d-1f373", + "man_cook_medium_light_skin_tone": "1f468-1f3fc-200d-1f373", + "male-cook_medium_skin_tone": "1f468-1f3fd-200d-1f373", + "man_cook_medium_skin_tone": "1f468-1f3fd-200d-1f373", + "male-cook_medium_dark_skin_tone": "1f468-1f3fe-200d-1f373", + "man_cook_medium_dark_skin_tone": "1f468-1f3fe-200d-1f373", + "male-cook_dark_skin_tone": "1f468-1f3ff-200d-1f373", + "man_cook_dark_skin_tone": "1f468-1f3ff-200d-1f373", + "man_feeding_baby_light_skin_tone": "1f468-1f3fb-200d-1f37c", + "man_feeding_baby_medium_light_skin_tone": "1f468-1f3fc-200d-1f37c", + "man_feeding_baby_medium_skin_tone": "1f468-1f3fd-200d-1f37c", + "man_feeding_baby_medium_dark_skin_tone": "1f468-1f3fe-200d-1f37c", + "man_feeding_baby_dark_skin_tone": "1f468-1f3ff-200d-1f37c", + "male-student_light_skin_tone": "1f468-1f3fb-200d-1f393", + "man_student_light_skin_tone": "1f468-1f3fb-200d-1f393", + "male-student_medium_light_skin_tone": "1f468-1f3fc-200d-1f393", + "man_student_medium_light_skin_tone": "1f468-1f3fc-200d-1f393", + "male-student_medium_skin_tone": "1f468-1f3fd-200d-1f393", + "man_student_medium_skin_tone": "1f468-1f3fd-200d-1f393", + "male-student_medium_dark_skin_tone": "1f468-1f3fe-200d-1f393", + "man_student_medium_dark_skin_tone": "1f468-1f3fe-200d-1f393", + "male-student_dark_skin_tone": "1f468-1f3ff-200d-1f393", + "man_student_dark_skin_tone": "1f468-1f3ff-200d-1f393", + "male-singer_light_skin_tone": "1f468-1f3fb-200d-1f3a4", + "man_singer_light_skin_tone": "1f468-1f3fb-200d-1f3a4", + "male-singer_medium_light_skin_tone": "1f468-1f3fc-200d-1f3a4", + "man_singer_medium_light_skin_tone": "1f468-1f3fc-200d-1f3a4", + "male-singer_medium_skin_tone": "1f468-1f3fd-200d-1f3a4", + "man_singer_medium_skin_tone": "1f468-1f3fd-200d-1f3a4", + "male-singer_medium_dark_skin_tone": 
"1f468-1f3fe-200d-1f3a4", + "man_singer_medium_dark_skin_tone": "1f468-1f3fe-200d-1f3a4", + "male-singer_dark_skin_tone": "1f468-1f3ff-200d-1f3a4", + "man_singer_dark_skin_tone": "1f468-1f3ff-200d-1f3a4", + "male-artist_light_skin_tone": "1f468-1f3fb-200d-1f3a8", + "man_artist_light_skin_tone": "1f468-1f3fb-200d-1f3a8", + "male-artist_medium_light_skin_tone": "1f468-1f3fc-200d-1f3a8", + "man_artist_medium_light_skin_tone": "1f468-1f3fc-200d-1f3a8", + "male-artist_medium_skin_tone": "1f468-1f3fd-200d-1f3a8", + "man_artist_medium_skin_tone": "1f468-1f3fd-200d-1f3a8", + "male-artist_medium_dark_skin_tone": "1f468-1f3fe-200d-1f3a8", + "man_artist_medium_dark_skin_tone": "1f468-1f3fe-200d-1f3a8", + "male-artist_dark_skin_tone": "1f468-1f3ff-200d-1f3a8", + "man_artist_dark_skin_tone": "1f468-1f3ff-200d-1f3a8", + "male-teacher_light_skin_tone": "1f468-1f3fb-200d-1f3eb", + "man_teacher_light_skin_tone": "1f468-1f3fb-200d-1f3eb", + "male-teacher_medium_light_skin_tone": "1f468-1f3fc-200d-1f3eb", + "man_teacher_medium_light_skin_tone": "1f468-1f3fc-200d-1f3eb", + "male-teacher_medium_skin_tone": "1f468-1f3fd-200d-1f3eb", + "man_teacher_medium_skin_tone": "1f468-1f3fd-200d-1f3eb", + "male-teacher_medium_dark_skin_tone": "1f468-1f3fe-200d-1f3eb", + "man_teacher_medium_dark_skin_tone": "1f468-1f3fe-200d-1f3eb", + "male-teacher_dark_skin_tone": "1f468-1f3ff-200d-1f3eb", + "man_teacher_dark_skin_tone": "1f468-1f3ff-200d-1f3eb", + "male-factory-worker_light_skin_tone": "1f468-1f3fb-200d-1f3ed", + "man_factory_worker_light_skin_tone": "1f468-1f3fb-200d-1f3ed", + "male-factory-worker_medium_light_skin_tone": "1f468-1f3fc-200d-1f3ed", + "man_factory_worker_medium_light_skin_tone": "1f468-1f3fc-200d-1f3ed", + "male-factory-worker_medium_skin_tone": "1f468-1f3fd-200d-1f3ed", + "man_factory_worker_medium_skin_tone": "1f468-1f3fd-200d-1f3ed", + "male-factory-worker_medium_dark_skin_tone": "1f468-1f3fe-200d-1f3ed", + "man_factory_worker_medium_dark_skin_tone": "1f468-1f3fe-200d-1f3ed", + 
"male-factory-worker_dark_skin_tone": "1f468-1f3ff-200d-1f3ed", + "man_factory_worker_dark_skin_tone": "1f468-1f3ff-200d-1f3ed", + "male-technologist_light_skin_tone": "1f468-1f3fb-200d-1f4bb", + "man_technologist_light_skin_tone": "1f468-1f3fb-200d-1f4bb", + "male-technologist_medium_light_skin_tone": "1f468-1f3fc-200d-1f4bb", + "man_technologist_medium_light_skin_tone": "1f468-1f3fc-200d-1f4bb", + "male-technologist_medium_skin_tone": "1f468-1f3fd-200d-1f4bb", + "man_technologist_medium_skin_tone": "1f468-1f3fd-200d-1f4bb", + "male-technologist_medium_dark_skin_tone": "1f468-1f3fe-200d-1f4bb", + "man_technologist_medium_dark_skin_tone": "1f468-1f3fe-200d-1f4bb", + "male-technologist_dark_skin_tone": "1f468-1f3ff-200d-1f4bb", + "man_technologist_dark_skin_tone": "1f468-1f3ff-200d-1f4bb", + "male-office-worker_light_skin_tone": "1f468-1f3fb-200d-1f4bc", + "man_office_worker_light_skin_tone": "1f468-1f3fb-200d-1f4bc", + "male-office-worker_medium_light_skin_tone": "1f468-1f3fc-200d-1f4bc", + "man_office_worker_medium_light_skin_tone": "1f468-1f3fc-200d-1f4bc", + "male-office-worker_medium_skin_tone": "1f468-1f3fd-200d-1f4bc", + "man_office_worker_medium_skin_tone": "1f468-1f3fd-200d-1f4bc", + "male-office-worker_medium_dark_skin_tone": "1f468-1f3fe-200d-1f4bc", + "man_office_worker_medium_dark_skin_tone": "1f468-1f3fe-200d-1f4bc", + "male-office-worker_dark_skin_tone": "1f468-1f3ff-200d-1f4bc", + "man_office_worker_dark_skin_tone": "1f468-1f3ff-200d-1f4bc", + "male-mechanic_light_skin_tone": "1f468-1f3fb-200d-1f527", + "man_mechanic_light_skin_tone": "1f468-1f3fb-200d-1f527", + "male-mechanic_medium_light_skin_tone": "1f468-1f3fc-200d-1f527", + "man_mechanic_medium_light_skin_tone": "1f468-1f3fc-200d-1f527", + "male-mechanic_medium_skin_tone": "1f468-1f3fd-200d-1f527", + "man_mechanic_medium_skin_tone": "1f468-1f3fd-200d-1f527", + "male-mechanic_medium_dark_skin_tone": "1f468-1f3fe-200d-1f527", + "man_mechanic_medium_dark_skin_tone": "1f468-1f3fe-200d-1f527", + 
"male-mechanic_dark_skin_tone": "1f468-1f3ff-200d-1f527", + "man_mechanic_dark_skin_tone": "1f468-1f3ff-200d-1f527", + "male-scientist_light_skin_tone": "1f468-1f3fb-200d-1f52c", + "man_scientist_light_skin_tone": "1f468-1f3fb-200d-1f52c", + "male-scientist_medium_light_skin_tone": "1f468-1f3fc-200d-1f52c", + "man_scientist_medium_light_skin_tone": "1f468-1f3fc-200d-1f52c", + "male-scientist_medium_skin_tone": "1f468-1f3fd-200d-1f52c", + "man_scientist_medium_skin_tone": "1f468-1f3fd-200d-1f52c", + "male-scientist_medium_dark_skin_tone": "1f468-1f3fe-200d-1f52c", + "man_scientist_medium_dark_skin_tone": "1f468-1f3fe-200d-1f52c", + "male-scientist_dark_skin_tone": "1f468-1f3ff-200d-1f52c", + "man_scientist_dark_skin_tone": "1f468-1f3ff-200d-1f52c", + "male-astronaut_light_skin_tone": "1f468-1f3fb-200d-1f680", + "man_astronaut_light_skin_tone": "1f468-1f3fb-200d-1f680", + "male-astronaut_medium_light_skin_tone": "1f468-1f3fc-200d-1f680", + "man_astronaut_medium_light_skin_tone": "1f468-1f3fc-200d-1f680", + "male-astronaut_medium_skin_tone": "1f468-1f3fd-200d-1f680", + "man_astronaut_medium_skin_tone": "1f468-1f3fd-200d-1f680", + "male-astronaut_medium_dark_skin_tone": "1f468-1f3fe-200d-1f680", + "man_astronaut_medium_dark_skin_tone": "1f468-1f3fe-200d-1f680", + "male-astronaut_dark_skin_tone": "1f468-1f3ff-200d-1f680", + "man_astronaut_dark_skin_tone": "1f468-1f3ff-200d-1f680", + "male-firefighter_light_skin_tone": "1f468-1f3fb-200d-1f692", + "man_firefighter_light_skin_tone": "1f468-1f3fb-200d-1f692", + "male-firefighter_medium_light_skin_tone": "1f468-1f3fc-200d-1f692", + "man_firefighter_medium_light_skin_tone": "1f468-1f3fc-200d-1f692", + "male-firefighter_medium_skin_tone": "1f468-1f3fd-200d-1f692", + "man_firefighter_medium_skin_tone": "1f468-1f3fd-200d-1f692", + "male-firefighter_medium_dark_skin_tone": "1f468-1f3fe-200d-1f692", + "man_firefighter_medium_dark_skin_tone": "1f468-1f3fe-200d-1f692", + "male-firefighter_dark_skin_tone": "1f468-1f3ff-200d-1f692", + 
"man_firefighter_dark_skin_tone": "1f468-1f3ff-200d-1f692", + "man_with_probing_cane_light_skin_tone": "1f468-1f3fb-200d-1f9af", + "man_with_probing_cane_medium_light_skin_tone": "1f468-1f3fc-200d-1f9af", + "man_with_probing_cane_medium_skin_tone": "1f468-1f3fd-200d-1f9af", + "man_with_probing_cane_medium_dark_skin_tone": "1f468-1f3fe-200d-1f9af", + "man_with_probing_cane_dark_skin_tone": "1f468-1f3ff-200d-1f9af", + "red_haired_man_light_skin_tone": "1f468-1f3fb-200d-1f9b0", + "red_haired_man_medium_light_skin_tone": "1f468-1f3fc-200d-1f9b0", + "red_haired_man_medium_skin_tone": "1f468-1f3fd-200d-1f9b0", + "red_haired_man_medium_dark_skin_tone": "1f468-1f3fe-200d-1f9b0", + "red_haired_man_dark_skin_tone": "1f468-1f3ff-200d-1f9b0", + "curly_haired_man_light_skin_tone": "1f468-1f3fb-200d-1f9b1", + "curly_haired_man_medium_light_skin_tone": "1f468-1f3fc-200d-1f9b1", + "curly_haired_man_medium_skin_tone": "1f468-1f3fd-200d-1f9b1", + "curly_haired_man_medium_dark_skin_tone": "1f468-1f3fe-200d-1f9b1", + "curly_haired_man_dark_skin_tone": "1f468-1f3ff-200d-1f9b1", + "bald_man_light_skin_tone": "1f468-1f3fb-200d-1f9b2", + "bald_man_medium_light_skin_tone": "1f468-1f3fc-200d-1f9b2", + "bald_man_medium_skin_tone": "1f468-1f3fd-200d-1f9b2", + "bald_man_medium_dark_skin_tone": "1f468-1f3fe-200d-1f9b2", + "bald_man_dark_skin_tone": "1f468-1f3ff-200d-1f9b2", + "white_haired_man_light_skin_tone": "1f468-1f3fb-200d-1f9b3", + "white_haired_man_medium_light_skin_tone": "1f468-1f3fc-200d-1f9b3", + "white_haired_man_medium_skin_tone": "1f468-1f3fd-200d-1f9b3", + "white_haired_man_medium_dark_skin_tone": "1f468-1f3fe-200d-1f9b3", + "white_haired_man_dark_skin_tone": "1f468-1f3ff-200d-1f9b3", + "man_in_motorized_wheelchair_light_skin_tone": "1f468-1f3fb-200d-1f9bc", + "man_in_motorized_wheelchair_medium_light_skin_tone": "1f468-1f3fc-200d-1f9bc", + "man_in_motorized_wheelchair_medium_skin_tone": "1f468-1f3fd-200d-1f9bc", + "man_in_motorized_wheelchair_medium_dark_skin_tone": 
"1f468-1f3fe-200d-1f9bc", + "man_in_motorized_wheelchair_dark_skin_tone": "1f468-1f3ff-200d-1f9bc", + "man_in_manual_wheelchair_light_skin_tone": "1f468-1f3fb-200d-1f9bd", + "man_in_manual_wheelchair_medium_light_skin_tone": "1f468-1f3fc-200d-1f9bd", + "man_in_manual_wheelchair_medium_skin_tone": "1f468-1f3fd-200d-1f9bd", + "man_in_manual_wheelchair_medium_dark_skin_tone": "1f468-1f3fe-200d-1f9bd", + "man_in_manual_wheelchair_dark_skin_tone": "1f468-1f3ff-200d-1f9bd", + "male-doctor_light_skin_tone": "1f468-1f3fb-200d-2695-fe0f", + "man_health_worker_light_skin_tone": "1f468-1f3fb-200d-2695-fe0f", + "male-doctor_medium_light_skin_tone": "1f468-1f3fc-200d-2695-fe0f", + "man_health_worker_medium_light_skin_tone": "1f468-1f3fc-200d-2695-fe0f", + "male-doctor_medium_skin_tone": "1f468-1f3fd-200d-2695-fe0f", + "man_health_worker_medium_skin_tone": "1f468-1f3fd-200d-2695-fe0f", + "male-doctor_medium_dark_skin_tone": "1f468-1f3fe-200d-2695-fe0f", + "man_health_worker_medium_dark_skin_tone": "1f468-1f3fe-200d-2695-fe0f", + "male-doctor_dark_skin_tone": "1f468-1f3ff-200d-2695-fe0f", + "man_health_worker_dark_skin_tone": "1f468-1f3ff-200d-2695-fe0f", + "male-judge_light_skin_tone": "1f468-1f3fb-200d-2696-fe0f", + "man_judge_light_skin_tone": "1f468-1f3fb-200d-2696-fe0f", + "male-judge_medium_light_skin_tone": "1f468-1f3fc-200d-2696-fe0f", + "man_judge_medium_light_skin_tone": "1f468-1f3fc-200d-2696-fe0f", + "male-judge_medium_skin_tone": "1f468-1f3fd-200d-2696-fe0f", + "man_judge_medium_skin_tone": "1f468-1f3fd-200d-2696-fe0f", + "male-judge_medium_dark_skin_tone": "1f468-1f3fe-200d-2696-fe0f", + "man_judge_medium_dark_skin_tone": "1f468-1f3fe-200d-2696-fe0f", + "male-judge_dark_skin_tone": "1f468-1f3ff-200d-2696-fe0f", + "man_judge_dark_skin_tone": "1f468-1f3ff-200d-2696-fe0f", + "male-pilot_light_skin_tone": "1f468-1f3fb-200d-2708-fe0f", + "man_pilot_light_skin_tone": "1f468-1f3fb-200d-2708-fe0f", + "male-pilot_medium_light_skin_tone": "1f468-1f3fc-200d-2708-fe0f", + 
"man_pilot_medium_light_skin_tone": "1f468-1f3fc-200d-2708-fe0f", + "male-pilot_medium_skin_tone": "1f468-1f3fd-200d-2708-fe0f", + "man_pilot_medium_skin_tone": "1f468-1f3fd-200d-2708-fe0f", + "male-pilot_medium_dark_skin_tone": "1f468-1f3fe-200d-2708-fe0f", + "man_pilot_medium_dark_skin_tone": "1f468-1f3fe-200d-2708-fe0f", + "male-pilot_dark_skin_tone": "1f468-1f3ff-200d-2708-fe0f", + "man_pilot_dark_skin_tone": "1f468-1f3ff-200d-2708-fe0f", + "man_light_skin_tone": "1f468-1f3fb", + "man_medium_light_skin_tone": "1f468-1f3fc", + "man_medium_skin_tone": "1f468-1f3fd", + "man_medium_dark_skin_tone": "1f468-1f3fe", + "man_dark_skin_tone": "1f468-1f3ff", + "female-farmer_light_skin_tone": "1f469-1f3fb-200d-1f33e", + "woman_farmer_light_skin_tone": "1f469-1f3fb-200d-1f33e", + "female-farmer_medium_light_skin_tone": "1f469-1f3fc-200d-1f33e", + "woman_farmer_medium_light_skin_tone": "1f469-1f3fc-200d-1f33e", + "female-farmer_medium_skin_tone": "1f469-1f3fd-200d-1f33e", + "woman_farmer_medium_skin_tone": "1f469-1f3fd-200d-1f33e", + "female-farmer_medium_dark_skin_tone": "1f469-1f3fe-200d-1f33e", + "woman_farmer_medium_dark_skin_tone": "1f469-1f3fe-200d-1f33e", + "female-farmer_dark_skin_tone": "1f469-1f3ff-200d-1f33e", + "woman_farmer_dark_skin_tone": "1f469-1f3ff-200d-1f33e", + "female-cook_light_skin_tone": "1f469-1f3fb-200d-1f373", + "woman_cook_light_skin_tone": "1f469-1f3fb-200d-1f373", + "female-cook_medium_light_skin_tone": "1f469-1f3fc-200d-1f373", + "woman_cook_medium_light_skin_tone": "1f469-1f3fc-200d-1f373", + "female-cook_medium_skin_tone": "1f469-1f3fd-200d-1f373", + "woman_cook_medium_skin_tone": "1f469-1f3fd-200d-1f373", + "female-cook_medium_dark_skin_tone": "1f469-1f3fe-200d-1f373", + "woman_cook_medium_dark_skin_tone": "1f469-1f3fe-200d-1f373", + "female-cook_dark_skin_tone": "1f469-1f3ff-200d-1f373", + "woman_cook_dark_skin_tone": "1f469-1f3ff-200d-1f373", + "woman_feeding_baby_light_skin_tone": "1f469-1f3fb-200d-1f37c", + 
"woman_feeding_baby_medium_light_skin_tone": "1f469-1f3fc-200d-1f37c", + "woman_feeding_baby_medium_skin_tone": "1f469-1f3fd-200d-1f37c", + "woman_feeding_baby_medium_dark_skin_tone": "1f469-1f3fe-200d-1f37c", + "woman_feeding_baby_dark_skin_tone": "1f469-1f3ff-200d-1f37c", + "female-student_light_skin_tone": "1f469-1f3fb-200d-1f393", + "woman_student_light_skin_tone": "1f469-1f3fb-200d-1f393", + "female-student_medium_light_skin_tone": "1f469-1f3fc-200d-1f393", + "woman_student_medium_light_skin_tone": "1f469-1f3fc-200d-1f393", + "female-student_medium_skin_tone": "1f469-1f3fd-200d-1f393", + "woman_student_medium_skin_tone": "1f469-1f3fd-200d-1f393", + "female-student_medium_dark_skin_tone": "1f469-1f3fe-200d-1f393", + "woman_student_medium_dark_skin_tone": "1f469-1f3fe-200d-1f393", + "female-student_dark_skin_tone": "1f469-1f3ff-200d-1f393", + "woman_student_dark_skin_tone": "1f469-1f3ff-200d-1f393", + "female-singer_light_skin_tone": "1f469-1f3fb-200d-1f3a4", + "woman_singer_light_skin_tone": "1f469-1f3fb-200d-1f3a4", + "female-singer_medium_light_skin_tone": "1f469-1f3fc-200d-1f3a4", + "woman_singer_medium_light_skin_tone": "1f469-1f3fc-200d-1f3a4", + "female-singer_medium_skin_tone": "1f469-1f3fd-200d-1f3a4", + "woman_singer_medium_skin_tone": "1f469-1f3fd-200d-1f3a4", + "female-singer_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3a4", + "woman_singer_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3a4", + "female-singer_dark_skin_tone": "1f469-1f3ff-200d-1f3a4", + "woman_singer_dark_skin_tone": "1f469-1f3ff-200d-1f3a4", + "female-artist_light_skin_tone": "1f469-1f3fb-200d-1f3a8", + "woman_artist_light_skin_tone": "1f469-1f3fb-200d-1f3a8", + "female-artist_medium_light_skin_tone": "1f469-1f3fc-200d-1f3a8", + "woman_artist_medium_light_skin_tone": "1f469-1f3fc-200d-1f3a8", + "female-artist_medium_skin_tone": "1f469-1f3fd-200d-1f3a8", + "woman_artist_medium_skin_tone": "1f469-1f3fd-200d-1f3a8", + "female-artist_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3a8", + 
"woman_artist_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3a8", + "female-artist_dark_skin_tone": "1f469-1f3ff-200d-1f3a8", + "woman_artist_dark_skin_tone": "1f469-1f3ff-200d-1f3a8", + "female-teacher_light_skin_tone": "1f469-1f3fb-200d-1f3eb", + "woman_teacher_light_skin_tone": "1f469-1f3fb-200d-1f3eb", + "female-teacher_medium_light_skin_tone": "1f469-1f3fc-200d-1f3eb", + "woman_teacher_medium_light_skin_tone": "1f469-1f3fc-200d-1f3eb", + "female-teacher_medium_skin_tone": "1f469-1f3fd-200d-1f3eb", + "woman_teacher_medium_skin_tone": "1f469-1f3fd-200d-1f3eb", + "female-teacher_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3eb", + "woman_teacher_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3eb", + "female-teacher_dark_skin_tone": "1f469-1f3ff-200d-1f3eb", + "woman_teacher_dark_skin_tone": "1f469-1f3ff-200d-1f3eb", + "female-factory-worker_light_skin_tone": "1f469-1f3fb-200d-1f3ed", + "woman_factory_worker_light_skin_tone": "1f469-1f3fb-200d-1f3ed", + "female-factory-worker_medium_light_skin_tone": "1f469-1f3fc-200d-1f3ed", + "woman_factory_worker_medium_light_skin_tone": "1f469-1f3fc-200d-1f3ed", + "female-factory-worker_medium_skin_tone": "1f469-1f3fd-200d-1f3ed", + "woman_factory_worker_medium_skin_tone": "1f469-1f3fd-200d-1f3ed", + "female-factory-worker_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3ed", + "woman_factory_worker_medium_dark_skin_tone": "1f469-1f3fe-200d-1f3ed", + "female-factory-worker_dark_skin_tone": "1f469-1f3ff-200d-1f3ed", + "woman_factory_worker_dark_skin_tone": "1f469-1f3ff-200d-1f3ed", + "female-technologist_light_skin_tone": "1f469-1f3fb-200d-1f4bb", + "woman_technologist_light_skin_tone": "1f469-1f3fb-200d-1f4bb", + "female-technologist_medium_light_skin_tone": "1f469-1f3fc-200d-1f4bb", + "woman_technologist_medium_light_skin_tone": "1f469-1f3fc-200d-1f4bb", + "female-technologist_medium_skin_tone": "1f469-1f3fd-200d-1f4bb", + "woman_technologist_medium_skin_tone": "1f469-1f3fd-200d-1f4bb", + "female-technologist_medium_dark_skin_tone": 
"1f469-1f3fe-200d-1f4bb", + "woman_technologist_medium_dark_skin_tone": "1f469-1f3fe-200d-1f4bb", + "female-technologist_dark_skin_tone": "1f469-1f3ff-200d-1f4bb", + "woman_technologist_dark_skin_tone": "1f469-1f3ff-200d-1f4bb", + "female-office-worker_light_skin_tone": "1f469-1f3fb-200d-1f4bc", + "woman_office_worker_light_skin_tone": "1f469-1f3fb-200d-1f4bc", + "female-office-worker_medium_light_skin_tone": "1f469-1f3fc-200d-1f4bc", + "woman_office_worker_medium_light_skin_tone": "1f469-1f3fc-200d-1f4bc", + "female-office-worker_medium_skin_tone": "1f469-1f3fd-200d-1f4bc", + "woman_office_worker_medium_skin_tone": "1f469-1f3fd-200d-1f4bc", + "female-office-worker_medium_dark_skin_tone": "1f469-1f3fe-200d-1f4bc", + "woman_office_worker_medium_dark_skin_tone": "1f469-1f3fe-200d-1f4bc", + "female-office-worker_dark_skin_tone": "1f469-1f3ff-200d-1f4bc", + "woman_office_worker_dark_skin_tone": "1f469-1f3ff-200d-1f4bc", + "female-mechanic_light_skin_tone": "1f469-1f3fb-200d-1f527", + "woman_mechanic_light_skin_tone": "1f469-1f3fb-200d-1f527", + "female-mechanic_medium_light_skin_tone": "1f469-1f3fc-200d-1f527", + "woman_mechanic_medium_light_skin_tone": "1f469-1f3fc-200d-1f527", + "female-mechanic_medium_skin_tone": "1f469-1f3fd-200d-1f527", + "woman_mechanic_medium_skin_tone": "1f469-1f3fd-200d-1f527", + "female-mechanic_medium_dark_skin_tone": "1f469-1f3fe-200d-1f527", + "woman_mechanic_medium_dark_skin_tone": "1f469-1f3fe-200d-1f527", + "female-mechanic_dark_skin_tone": "1f469-1f3ff-200d-1f527", + "woman_mechanic_dark_skin_tone": "1f469-1f3ff-200d-1f527", + "female-scientist_light_skin_tone": "1f469-1f3fb-200d-1f52c", + "woman_scientist_light_skin_tone": "1f469-1f3fb-200d-1f52c", + "female-scientist_medium_light_skin_tone": "1f469-1f3fc-200d-1f52c", + "woman_scientist_medium_light_skin_tone": "1f469-1f3fc-200d-1f52c", + "female-scientist_medium_skin_tone": "1f469-1f3fd-200d-1f52c", + "woman_scientist_medium_skin_tone": "1f469-1f3fd-200d-1f52c", + 
"female-scientist_medium_dark_skin_tone": "1f469-1f3fe-200d-1f52c", + "woman_scientist_medium_dark_skin_tone": "1f469-1f3fe-200d-1f52c", + "female-scientist_dark_skin_tone": "1f469-1f3ff-200d-1f52c", + "woman_scientist_dark_skin_tone": "1f469-1f3ff-200d-1f52c", + "female-astronaut_light_skin_tone": "1f469-1f3fb-200d-1f680", + "woman_astronaut_light_skin_tone": "1f469-1f3fb-200d-1f680", + "female-astronaut_medium_light_skin_tone": "1f469-1f3fc-200d-1f680", + "woman_astronaut_medium_light_skin_tone": "1f469-1f3fc-200d-1f680", + "female-astronaut_medium_skin_tone": "1f469-1f3fd-200d-1f680", + "woman_astronaut_medium_skin_tone": "1f469-1f3fd-200d-1f680", + "female-astronaut_medium_dark_skin_tone": "1f469-1f3fe-200d-1f680", + "woman_astronaut_medium_dark_skin_tone": "1f469-1f3fe-200d-1f680", + "female-astronaut_dark_skin_tone": "1f469-1f3ff-200d-1f680", + "woman_astronaut_dark_skin_tone": "1f469-1f3ff-200d-1f680", + "female-firefighter_light_skin_tone": "1f469-1f3fb-200d-1f692", + "woman_firefighter_light_skin_tone": "1f469-1f3fb-200d-1f692", + "female-firefighter_medium_light_skin_tone": "1f469-1f3fc-200d-1f692", + "woman_firefighter_medium_light_skin_tone": "1f469-1f3fc-200d-1f692", + "female-firefighter_medium_skin_tone": "1f469-1f3fd-200d-1f692", + "woman_firefighter_medium_skin_tone": "1f469-1f3fd-200d-1f692", + "female-firefighter_medium_dark_skin_tone": "1f469-1f3fe-200d-1f692", + "woman_firefighter_medium_dark_skin_tone": "1f469-1f3fe-200d-1f692", + "female-firefighter_dark_skin_tone": "1f469-1f3ff-200d-1f692", + "woman_firefighter_dark_skin_tone": "1f469-1f3ff-200d-1f692", + "woman_with_probing_cane_light_skin_tone": "1f469-1f3fb-200d-1f9af", + "woman_with_probing_cane_medium_light_skin_tone": "1f469-1f3fc-200d-1f9af", + "woman_with_probing_cane_medium_skin_tone": "1f469-1f3fd-200d-1f9af", + "woman_with_probing_cane_medium_dark_skin_tone": "1f469-1f3fe-200d-1f9af", + "woman_with_probing_cane_dark_skin_tone": "1f469-1f3ff-200d-1f9af", + 
"red_haired_woman_light_skin_tone": "1f469-1f3fb-200d-1f9b0", + "red_haired_woman_medium_light_skin_tone": "1f469-1f3fc-200d-1f9b0", + "red_haired_woman_medium_skin_tone": "1f469-1f3fd-200d-1f9b0", + "red_haired_woman_medium_dark_skin_tone": "1f469-1f3fe-200d-1f9b0", + "red_haired_woman_dark_skin_tone": "1f469-1f3ff-200d-1f9b0", + "curly_haired_woman_light_skin_tone": "1f469-1f3fb-200d-1f9b1", + "curly_haired_woman_medium_light_skin_tone": "1f469-1f3fc-200d-1f9b1", + "curly_haired_woman_medium_skin_tone": "1f469-1f3fd-200d-1f9b1", + "curly_haired_woman_medium_dark_skin_tone": "1f469-1f3fe-200d-1f9b1", + "curly_haired_woman_dark_skin_tone": "1f469-1f3ff-200d-1f9b1", + "bald_woman_light_skin_tone": "1f469-1f3fb-200d-1f9b2", + "bald_woman_medium_light_skin_tone": "1f469-1f3fc-200d-1f9b2", + "bald_woman_medium_skin_tone": "1f469-1f3fd-200d-1f9b2", + "bald_woman_medium_dark_skin_tone": "1f469-1f3fe-200d-1f9b2", + "bald_woman_dark_skin_tone": "1f469-1f3ff-200d-1f9b2", + "white_haired_woman_light_skin_tone": "1f469-1f3fb-200d-1f9b3", + "white_haired_woman_medium_light_skin_tone": "1f469-1f3fc-200d-1f9b3", + "white_haired_woman_medium_skin_tone": "1f469-1f3fd-200d-1f9b3", + "white_haired_woman_medium_dark_skin_tone": "1f469-1f3fe-200d-1f9b3", + "white_haired_woman_dark_skin_tone": "1f469-1f3ff-200d-1f9b3", + "woman_in_motorized_wheelchair_light_skin_tone": "1f469-1f3fb-200d-1f9bc", + "woman_in_motorized_wheelchair_medium_light_skin_tone": "1f469-1f3fc-200d-1f9bc", + "woman_in_motorized_wheelchair_medium_skin_tone": "1f469-1f3fd-200d-1f9bc", + "woman_in_motorized_wheelchair_medium_dark_skin_tone": "1f469-1f3fe-200d-1f9bc", + "woman_in_motorized_wheelchair_dark_skin_tone": "1f469-1f3ff-200d-1f9bc", + "woman_in_manual_wheelchair_light_skin_tone": "1f469-1f3fb-200d-1f9bd", + "woman_in_manual_wheelchair_medium_light_skin_tone": "1f469-1f3fc-200d-1f9bd", + "woman_in_manual_wheelchair_medium_skin_tone": "1f469-1f3fd-200d-1f9bd", + 
"woman_in_manual_wheelchair_medium_dark_skin_tone": "1f469-1f3fe-200d-1f9bd", + "woman_in_manual_wheelchair_dark_skin_tone": "1f469-1f3ff-200d-1f9bd", + "female-doctor_light_skin_tone": "1f469-1f3fb-200d-2695-fe0f", + "woman_health_worker_light_skin_tone": "1f469-1f3fb-200d-2695-fe0f", + "female-doctor_medium_light_skin_tone": "1f469-1f3fc-200d-2695-fe0f", + "woman_health_worker_medium_light_skin_tone": "1f469-1f3fc-200d-2695-fe0f", + "female-doctor_medium_skin_tone": "1f469-1f3fd-200d-2695-fe0f", + "woman_health_worker_medium_skin_tone": "1f469-1f3fd-200d-2695-fe0f", + "female-doctor_medium_dark_skin_tone": "1f469-1f3fe-200d-2695-fe0f", + "woman_health_worker_medium_dark_skin_tone": "1f469-1f3fe-200d-2695-fe0f", + "female-doctor_dark_skin_tone": "1f469-1f3ff-200d-2695-fe0f", + "woman_health_worker_dark_skin_tone": "1f469-1f3ff-200d-2695-fe0f", + "female-judge_light_skin_tone": "1f469-1f3fb-200d-2696-fe0f", + "woman_judge_light_skin_tone": "1f469-1f3fb-200d-2696-fe0f", + "female-judge_medium_light_skin_tone": "1f469-1f3fc-200d-2696-fe0f", + "woman_judge_medium_light_skin_tone": "1f469-1f3fc-200d-2696-fe0f", + "female-judge_medium_skin_tone": "1f469-1f3fd-200d-2696-fe0f", + "woman_judge_medium_skin_tone": "1f469-1f3fd-200d-2696-fe0f", + "female-judge_medium_dark_skin_tone": "1f469-1f3fe-200d-2696-fe0f", + "woman_judge_medium_dark_skin_tone": "1f469-1f3fe-200d-2696-fe0f", + "female-judge_dark_skin_tone": "1f469-1f3ff-200d-2696-fe0f", + "woman_judge_dark_skin_tone": "1f469-1f3ff-200d-2696-fe0f", + "female-pilot_light_skin_tone": "1f469-1f3fb-200d-2708-fe0f", + "woman_pilot_light_skin_tone": "1f469-1f3fb-200d-2708-fe0f", + "female-pilot_medium_light_skin_tone": "1f469-1f3fc-200d-2708-fe0f", + "woman_pilot_medium_light_skin_tone": "1f469-1f3fc-200d-2708-fe0f", + "female-pilot_medium_skin_tone": "1f469-1f3fd-200d-2708-fe0f", + "woman_pilot_medium_skin_tone": "1f469-1f3fd-200d-2708-fe0f", + "female-pilot_medium_dark_skin_tone": "1f469-1f3fe-200d-2708-fe0f", + 
"woman_pilot_medium_dark_skin_tone": "1f469-1f3fe-200d-2708-fe0f", + "female-pilot_dark_skin_tone": "1f469-1f3ff-200d-2708-fe0f", + "woman_pilot_dark_skin_tone": "1f469-1f3ff-200d-2708-fe0f", + "woman_light_skin_tone": "1f469-1f3fb", + "woman_medium_light_skin_tone": "1f469-1f3fc", + "woman_medium_skin_tone": "1f469-1f3fd", + "woman_medium_dark_skin_tone": "1f469-1f3fe", + "woman_dark_skin_tone": "1f469-1f3ff", + "man_and_woman_holding_hands_light_skin_tone": "1f46b-1f3fb", + "woman_and_man_holding_hands_light_skin_tone": "1f46b-1f3fb", + "couple_light_skin_tone": "1f46b-1f3fb", + "man_and_woman_holding_hands_medium_light_skin_tone": "1f46b-1f3fc", + "woman_and_man_holding_hands_medium_light_skin_tone": "1f46b-1f3fc", + "couple_medium_light_skin_tone": "1f46b-1f3fc", + "man_and_woman_holding_hands_medium_skin_tone": "1f46b-1f3fd", + "woman_and_man_holding_hands_medium_skin_tone": "1f46b-1f3fd", + "couple_medium_skin_tone": "1f46b-1f3fd", + "man_and_woman_holding_hands_medium_dark_skin_tone": "1f46b-1f3fe", + "woman_and_man_holding_hands_medium_dark_skin_tone": "1f46b-1f3fe", + "couple_medium_dark_skin_tone": "1f46b-1f3fe", + "man_and_woman_holding_hands_dark_skin_tone": "1f46b-1f3ff", + "woman_and_man_holding_hands_dark_skin_tone": "1f46b-1f3ff", + "couple_dark_skin_tone": "1f46b-1f3ff", + "man_and_woman_holding_hands_light_skin_tone_medium_light_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fc", + "woman_and_man_holding_hands_light_skin_tone_medium_light_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fc", + "couple_light_skin_tone_medium_light_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fc", + "man_and_woman_holding_hands_light_skin_tone_medium_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fd", + "woman_and_man_holding_hands_light_skin_tone_medium_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fd", + "couple_light_skin_tone_medium_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fd", + 
"man_and_woman_holding_hands_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fe", + "woman_and_man_holding_hands_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fe", + "couple_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3fe", + "man_and_woman_holding_hands_light_skin_tone_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3ff", + "woman_and_man_holding_hands_light_skin_tone_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3ff", + "couple_light_skin_tone_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f468-1f3ff", + "man_and_woman_holding_hands_medium_light_skin_tone_light_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fb", + "woman_and_man_holding_hands_medium_light_skin_tone_light_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fb", + "couple_medium_light_skin_tone_light_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fb", + "man_and_woman_holding_hands_medium_light_skin_tone_medium_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fd", + "woman_and_man_holding_hands_medium_light_skin_tone_medium_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fd", + "couple_medium_light_skin_tone_medium_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fd", + "man_and_woman_holding_hands_medium_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fe", + "woman_and_man_holding_hands_medium_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fe", + "couple_medium_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3fe", + "man_and_woman_holding_hands_medium_light_skin_tone_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3ff", + "woman_and_man_holding_hands_medium_light_skin_tone_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3ff", + "couple_medium_light_skin_tone_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f468-1f3ff", + "man_and_woman_holding_hands_medium_skin_tone_light_skin_tone": 
"1f469-1f3fd-200d-1f91d-200d-1f468-1f3fb", + "woman_and_man_holding_hands_medium_skin_tone_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fb", + "couple_medium_skin_tone_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fb", + "man_and_woman_holding_hands_medium_skin_tone_medium_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fc", + "woman_and_man_holding_hands_medium_skin_tone_medium_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fc", + "couple_medium_skin_tone_medium_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fc", + "man_and_woman_holding_hands_medium_skin_tone_medium_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fe", + "woman_and_man_holding_hands_medium_skin_tone_medium_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fe", + "couple_medium_skin_tone_medium_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3fe", + "man_and_woman_holding_hands_medium_skin_tone_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3ff", + "woman_and_man_holding_hands_medium_skin_tone_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3ff", + "couple_medium_skin_tone_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f468-1f3ff", + "man_and_woman_holding_hands_medium_dark_skin_tone_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fb", + "woman_and_man_holding_hands_medium_dark_skin_tone_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fb", + "couple_medium_dark_skin_tone_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fb", + "man_and_woman_holding_hands_medium_dark_skin_tone_medium_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fc", + "woman_and_man_holding_hands_medium_dark_skin_tone_medium_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fc", + "couple_medium_dark_skin_tone_medium_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fc", + "man_and_woman_holding_hands_medium_dark_skin_tone_medium_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fd", + 
"woman_and_man_holding_hands_medium_dark_skin_tone_medium_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fd", + "couple_medium_dark_skin_tone_medium_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3fd", + "man_and_woman_holding_hands_medium_dark_skin_tone_dark_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3ff", + "woman_and_man_holding_hands_medium_dark_skin_tone_dark_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3ff", + "couple_medium_dark_skin_tone_dark_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f468-1f3ff", + "man_and_woman_holding_hands_dark_skin_tone_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fb", + "woman_and_man_holding_hands_dark_skin_tone_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fb", + "couple_dark_skin_tone_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fb", + "man_and_woman_holding_hands_dark_skin_tone_medium_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fc", + "woman_and_man_holding_hands_dark_skin_tone_medium_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fc", + "couple_dark_skin_tone_medium_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fc", + "man_and_woman_holding_hands_dark_skin_tone_medium_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fd", + "woman_and_man_holding_hands_dark_skin_tone_medium_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fd", + "couple_dark_skin_tone_medium_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fd", + "man_and_woman_holding_hands_dark_skin_tone_medium_dark_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fe", + "woman_and_man_holding_hands_dark_skin_tone_medium_dark_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fe", + "couple_dark_skin_tone_medium_dark_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f468-1f3fe", + "two_men_holding_hands_light_skin_tone": "1f46c-1f3fb", + "men_holding_hands_light_skin_tone": "1f46c-1f3fb", + "two_men_holding_hands_medium_light_skin_tone": "1f46c-1f3fc", + "men_holding_hands_medium_light_skin_tone": 
"1f46c-1f3fc", + "two_men_holding_hands_medium_skin_tone": "1f46c-1f3fd", + "men_holding_hands_medium_skin_tone": "1f46c-1f3fd", + "two_men_holding_hands_medium_dark_skin_tone": "1f46c-1f3fe", + "men_holding_hands_medium_dark_skin_tone": "1f46c-1f3fe", + "two_men_holding_hands_dark_skin_tone": "1f46c-1f3ff", + "men_holding_hands_dark_skin_tone": "1f46c-1f3ff", + "two_men_holding_hands_light_skin_tone_medium_light_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3fc", + "men_holding_hands_light_skin_tone_medium_light_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3fc", + "two_men_holding_hands_light_skin_tone_medium_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3fd", + "men_holding_hands_light_skin_tone_medium_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3fd", + "two_men_holding_hands_light_skin_tone_medium_dark_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3fe", + "men_holding_hands_light_skin_tone_medium_dark_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3fe", + "two_men_holding_hands_light_skin_tone_dark_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3ff", + "men_holding_hands_light_skin_tone_dark_skin_tone": "1f468-1f3fb-200d-1f91d-200d-1f468-1f3ff", + "two_men_holding_hands_medium_light_skin_tone_light_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3fb", + "men_holding_hands_medium_light_skin_tone_light_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3fb", + "two_men_holding_hands_medium_light_skin_tone_medium_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3fd", + "men_holding_hands_medium_light_skin_tone_medium_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3fd", + "two_men_holding_hands_medium_light_skin_tone_medium_dark_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3fe", + "men_holding_hands_medium_light_skin_tone_medium_dark_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3fe", + "two_men_holding_hands_medium_light_skin_tone_dark_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3ff", + 
"men_holding_hands_medium_light_skin_tone_dark_skin_tone": "1f468-1f3fc-200d-1f91d-200d-1f468-1f3ff", + "two_men_holding_hands_medium_skin_tone_light_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3fb", + "men_holding_hands_medium_skin_tone_light_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3fb", + "two_men_holding_hands_medium_skin_tone_medium_light_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3fc", + "men_holding_hands_medium_skin_tone_medium_light_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3fc", + "two_men_holding_hands_medium_skin_tone_medium_dark_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3fe", + "men_holding_hands_medium_skin_tone_medium_dark_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3fe", + "two_men_holding_hands_medium_skin_tone_dark_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3ff", + "men_holding_hands_medium_skin_tone_dark_skin_tone": "1f468-1f3fd-200d-1f91d-200d-1f468-1f3ff", + "two_men_holding_hands_medium_dark_skin_tone_light_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3fb", + "men_holding_hands_medium_dark_skin_tone_light_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3fb", + "two_men_holding_hands_medium_dark_skin_tone_medium_light_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3fc", + "men_holding_hands_medium_dark_skin_tone_medium_light_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3fc", + "two_men_holding_hands_medium_dark_skin_tone_medium_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3fd", + "men_holding_hands_medium_dark_skin_tone_medium_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3fd", + "two_men_holding_hands_medium_dark_skin_tone_dark_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3ff", + "men_holding_hands_medium_dark_skin_tone_dark_skin_tone": "1f468-1f3fe-200d-1f91d-200d-1f468-1f3ff", + "two_men_holding_hands_dark_skin_tone_light_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fb", + "men_holding_hands_dark_skin_tone_light_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fb", + 
"two_men_holding_hands_dark_skin_tone_medium_light_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fc", + "men_holding_hands_dark_skin_tone_medium_light_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fc", + "two_men_holding_hands_dark_skin_tone_medium_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fd", + "men_holding_hands_dark_skin_tone_medium_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fd", + "two_men_holding_hands_dark_skin_tone_medium_dark_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fe", + "men_holding_hands_dark_skin_tone_medium_dark_skin_tone": "1f468-1f3ff-200d-1f91d-200d-1f468-1f3fe", + "two_women_holding_hands_light_skin_tone": "1f46d-1f3fb", + "women_holding_hands_light_skin_tone": "1f46d-1f3fb", + "two_women_holding_hands_medium_light_skin_tone": "1f46d-1f3fc", + "women_holding_hands_medium_light_skin_tone": "1f46d-1f3fc", + "two_women_holding_hands_medium_skin_tone": "1f46d-1f3fd", + "women_holding_hands_medium_skin_tone": "1f46d-1f3fd", + "two_women_holding_hands_medium_dark_skin_tone": "1f46d-1f3fe", + "women_holding_hands_medium_dark_skin_tone": "1f46d-1f3fe", + "two_women_holding_hands_dark_skin_tone": "1f46d-1f3ff", + "women_holding_hands_dark_skin_tone": "1f46d-1f3ff", + "two_women_holding_hands_light_skin_tone_medium_light_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3fc", + "women_holding_hands_light_skin_tone_medium_light_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3fc", + "two_women_holding_hands_light_skin_tone_medium_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3fd", + "women_holding_hands_light_skin_tone_medium_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3fd", + "two_women_holding_hands_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3fe", + "women_holding_hands_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3fe", + "two_women_holding_hands_light_skin_tone_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3ff", + 
"women_holding_hands_light_skin_tone_dark_skin_tone": "1f469-1f3fb-200d-1f91d-200d-1f469-1f3ff", + "two_women_holding_hands_medium_light_skin_tone_light_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3fb", + "women_holding_hands_medium_light_skin_tone_light_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3fb", + "two_women_holding_hands_medium_light_skin_tone_medium_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3fd", + "women_holding_hands_medium_light_skin_tone_medium_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3fd", + "two_women_holding_hands_medium_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3fe", + "women_holding_hands_medium_light_skin_tone_medium_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3fe", + "two_women_holding_hands_medium_light_skin_tone_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3ff", + "women_holding_hands_medium_light_skin_tone_dark_skin_tone": "1f469-1f3fc-200d-1f91d-200d-1f469-1f3ff", + "two_women_holding_hands_medium_skin_tone_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3fb", + "women_holding_hands_medium_skin_tone_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3fb", + "two_women_holding_hands_medium_skin_tone_medium_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3fc", + "women_holding_hands_medium_skin_tone_medium_light_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3fc", + "two_women_holding_hands_medium_skin_tone_medium_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3fe", + "women_holding_hands_medium_skin_tone_medium_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3fe", + "two_women_holding_hands_medium_skin_tone_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3ff", + "women_holding_hands_medium_skin_tone_dark_skin_tone": "1f469-1f3fd-200d-1f91d-200d-1f469-1f3ff", + "two_women_holding_hands_medium_dark_skin_tone_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3fb", + 
"women_holding_hands_medium_dark_skin_tone_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3fb", + "two_women_holding_hands_medium_dark_skin_tone_medium_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3fc", + "women_holding_hands_medium_dark_skin_tone_medium_light_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3fc", + "two_women_holding_hands_medium_dark_skin_tone_medium_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3fd", + "women_holding_hands_medium_dark_skin_tone_medium_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3fd", + "two_women_holding_hands_medium_dark_skin_tone_dark_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3ff", + "women_holding_hands_medium_dark_skin_tone_dark_skin_tone": "1f469-1f3fe-200d-1f91d-200d-1f469-1f3ff", + "two_women_holding_hands_dark_skin_tone_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fb", + "women_holding_hands_dark_skin_tone_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fb", + "two_women_holding_hands_dark_skin_tone_medium_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fc", + "women_holding_hands_dark_skin_tone_medium_light_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fc", + "two_women_holding_hands_dark_skin_tone_medium_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fd", + "women_holding_hands_dark_skin_tone_medium_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fd", + "two_women_holding_hands_dark_skin_tone_medium_dark_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fe", + "women_holding_hands_dark_skin_tone_medium_dark_skin_tone": "1f469-1f3ff-200d-1f91d-200d-1f469-1f3fe", + "female-police-officer_light_skin_tone": "1f46e-1f3fb-200d-2640-fe0f", + "policewoman_light_skin_tone": "1f46e-1f3fb-200d-2640-fe0f", + "female-police-officer_medium_light_skin_tone": "1f46e-1f3fc-200d-2640-fe0f", + "policewoman_medium_light_skin_tone": "1f46e-1f3fc-200d-2640-fe0f", + "female-police-officer_medium_skin_tone": "1f46e-1f3fd-200d-2640-fe0f", + "policewoman_medium_skin_tone": 
"1f46e-1f3fd-200d-2640-fe0f", + "female-police-officer_medium_dark_skin_tone": "1f46e-1f3fe-200d-2640-fe0f", + "policewoman_medium_dark_skin_tone": "1f46e-1f3fe-200d-2640-fe0f", + "female-police-officer_dark_skin_tone": "1f46e-1f3ff-200d-2640-fe0f", + "policewoman_dark_skin_tone": "1f46e-1f3ff-200d-2640-fe0f", + "male-police-officer_light_skin_tone": "1f46e-1f3fb-200d-2642-fe0f", + "policeman_light_skin_tone": "1f46e-1f3fb-200d-2642-fe0f", + "male-police-officer_medium_light_skin_tone": "1f46e-1f3fc-200d-2642-fe0f", + "policeman_medium_light_skin_tone": "1f46e-1f3fc-200d-2642-fe0f", + "male-police-officer_medium_skin_tone": "1f46e-1f3fd-200d-2642-fe0f", + "policeman_medium_skin_tone": "1f46e-1f3fd-200d-2642-fe0f", + "male-police-officer_medium_dark_skin_tone": "1f46e-1f3fe-200d-2642-fe0f", + "policeman_medium_dark_skin_tone": "1f46e-1f3fe-200d-2642-fe0f", + "male-police-officer_dark_skin_tone": "1f46e-1f3ff-200d-2642-fe0f", + "policeman_dark_skin_tone": "1f46e-1f3ff-200d-2642-fe0f", + "cop_light_skin_tone": "1f46e-1f3fb", + "cop_medium_light_skin_tone": "1f46e-1f3fc", + "cop_medium_skin_tone": "1f46e-1f3fd", + "cop_medium_dark_skin_tone": "1f46e-1f3fe", + "cop_dark_skin_tone": "1f46e-1f3ff", + "woman_with_veil_light_skin_tone": "1f470-1f3fb-200d-2640-fe0f", + "woman_with_veil_medium_light_skin_tone": "1f470-1f3fc-200d-2640-fe0f", + "woman_with_veil_medium_skin_tone": "1f470-1f3fd-200d-2640-fe0f", + "woman_with_veil_medium_dark_skin_tone": "1f470-1f3fe-200d-2640-fe0f", + "woman_with_veil_dark_skin_tone": "1f470-1f3ff-200d-2640-fe0f", + "man_with_veil_light_skin_tone": "1f470-1f3fb-200d-2642-fe0f", + "man_with_veil_medium_light_skin_tone": "1f470-1f3fc-200d-2642-fe0f", + "man_with_veil_medium_skin_tone": "1f470-1f3fd-200d-2642-fe0f", + "man_with_veil_medium_dark_skin_tone": "1f470-1f3fe-200d-2642-fe0f", + "man_with_veil_dark_skin_tone": "1f470-1f3ff-200d-2642-fe0f", + "bride_with_veil_light_skin_tone": "1f470-1f3fb", + "bride_with_veil_medium_light_skin_tone": 
"1f470-1f3fc", + "bride_with_veil_medium_skin_tone": "1f470-1f3fd", + "bride_with_veil_medium_dark_skin_tone": "1f470-1f3fe", + "bride_with_veil_dark_skin_tone": "1f470-1f3ff", + "blond-haired-woman_light_skin_tone": "1f471-1f3fb-200d-2640-fe0f", + "blonde_woman_light_skin_tone": "1f471-1f3fb-200d-2640-fe0f", + "blond-haired-woman_medium_light_skin_tone": "1f471-1f3fc-200d-2640-fe0f", + "blonde_woman_medium_light_skin_tone": "1f471-1f3fc-200d-2640-fe0f", + "blond-haired-woman_medium_skin_tone": "1f471-1f3fd-200d-2640-fe0f", + "blonde_woman_medium_skin_tone": "1f471-1f3fd-200d-2640-fe0f", + "blond-haired-woman_medium_dark_skin_tone": "1f471-1f3fe-200d-2640-fe0f", + "blonde_woman_medium_dark_skin_tone": "1f471-1f3fe-200d-2640-fe0f", + "blond-haired-woman_dark_skin_tone": "1f471-1f3ff-200d-2640-fe0f", + "blonde_woman_dark_skin_tone": "1f471-1f3ff-200d-2640-fe0f", + "blond-haired-man_light_skin_tone": "1f471-1f3fb-200d-2642-fe0f", + "blonde_man_light_skin_tone": "1f471-1f3fb-200d-2642-fe0f", + "blond-haired-man_medium_light_skin_tone": "1f471-1f3fc-200d-2642-fe0f", + "blonde_man_medium_light_skin_tone": "1f471-1f3fc-200d-2642-fe0f", + "blond-haired-man_medium_skin_tone": "1f471-1f3fd-200d-2642-fe0f", + "blonde_man_medium_skin_tone": "1f471-1f3fd-200d-2642-fe0f", + "blond-haired-man_medium_dark_skin_tone": "1f471-1f3fe-200d-2642-fe0f", + "blonde_man_medium_dark_skin_tone": "1f471-1f3fe-200d-2642-fe0f", + "blond-haired-man_dark_skin_tone": "1f471-1f3ff-200d-2642-fe0f", + "blonde_man_dark_skin_tone": "1f471-1f3ff-200d-2642-fe0f", + "person_with_blond_hair_light_skin_tone": "1f471-1f3fb", + "person_with_blond_hair_medium_light_skin_tone": "1f471-1f3fc", + "person_with_blond_hair_medium_skin_tone": "1f471-1f3fd", + "person_with_blond_hair_medium_dark_skin_tone": "1f471-1f3fe", + "person_with_blond_hair_dark_skin_tone": "1f471-1f3ff", + "man_with_gua_pi_mao_light_skin_tone": "1f472-1f3fb", + "man_with_gua_pi_mao_medium_light_skin_tone": "1f472-1f3fc", + 
"man_with_gua_pi_mao_medium_skin_tone": "1f472-1f3fd", + "man_with_gua_pi_mao_medium_dark_skin_tone": "1f472-1f3fe", + "man_with_gua_pi_mao_dark_skin_tone": "1f472-1f3ff", + "woman-wearing-turban_light_skin_tone": "1f473-1f3fb-200d-2640-fe0f", + "woman_with_turban_light_skin_tone": "1f473-1f3fb-200d-2640-fe0f", + "woman-wearing-turban_medium_light_skin_tone": "1f473-1f3fc-200d-2640-fe0f", + "woman_with_turban_medium_light_skin_tone": "1f473-1f3fc-200d-2640-fe0f", + "woman-wearing-turban_medium_skin_tone": "1f473-1f3fd-200d-2640-fe0f", + "woman_with_turban_medium_skin_tone": "1f473-1f3fd-200d-2640-fe0f", + "woman-wearing-turban_medium_dark_skin_tone": "1f473-1f3fe-200d-2640-fe0f", + "woman_with_turban_medium_dark_skin_tone": "1f473-1f3fe-200d-2640-fe0f", + "woman-wearing-turban_dark_skin_tone": "1f473-1f3ff-200d-2640-fe0f", + "woman_with_turban_dark_skin_tone": "1f473-1f3ff-200d-2640-fe0f", + "man-wearing-turban_light_skin_tone": "1f473-1f3fb-200d-2642-fe0f", + "man-wearing-turban_medium_light_skin_tone": "1f473-1f3fc-200d-2642-fe0f", + "man-wearing-turban_medium_skin_tone": "1f473-1f3fd-200d-2642-fe0f", + "man-wearing-turban_medium_dark_skin_tone": "1f473-1f3fe-200d-2642-fe0f", + "man-wearing-turban_dark_skin_tone": "1f473-1f3ff-200d-2642-fe0f", + "man_with_turban_light_skin_tone": "1f473-1f3fb", + "man_with_turban_medium_light_skin_tone": "1f473-1f3fc", + "man_with_turban_medium_skin_tone": "1f473-1f3fd", + "man_with_turban_medium_dark_skin_tone": "1f473-1f3fe", + "man_with_turban_dark_skin_tone": "1f473-1f3ff", + "older_man_light_skin_tone": "1f474-1f3fb", + "older_man_medium_light_skin_tone": "1f474-1f3fc", + "older_man_medium_skin_tone": "1f474-1f3fd", + "older_man_medium_dark_skin_tone": "1f474-1f3fe", + "older_man_dark_skin_tone": "1f474-1f3ff", + "older_woman_light_skin_tone": "1f475-1f3fb", + "older_woman_medium_light_skin_tone": "1f475-1f3fc", + "older_woman_medium_skin_tone": "1f475-1f3fd", + "older_woman_medium_dark_skin_tone": "1f475-1f3fe", + 
"older_woman_dark_skin_tone": "1f475-1f3ff", + "baby_light_skin_tone": "1f476-1f3fb", + "baby_medium_light_skin_tone": "1f476-1f3fc", + "baby_medium_skin_tone": "1f476-1f3fd", + "baby_medium_dark_skin_tone": "1f476-1f3fe", + "baby_dark_skin_tone": "1f476-1f3ff", + "female-construction-worker_light_skin_tone": "1f477-1f3fb-200d-2640-fe0f", + "construction_worker_woman_light_skin_tone": "1f477-1f3fb-200d-2640-fe0f", + "female-construction-worker_medium_light_skin_tone": "1f477-1f3fc-200d-2640-fe0f", + "construction_worker_woman_medium_light_skin_tone": "1f477-1f3fc-200d-2640-fe0f", + "female-construction-worker_medium_skin_tone": "1f477-1f3fd-200d-2640-fe0f", + "construction_worker_woman_medium_skin_tone": "1f477-1f3fd-200d-2640-fe0f", + "female-construction-worker_medium_dark_skin_tone": "1f477-1f3fe-200d-2640-fe0f", + "construction_worker_woman_medium_dark_skin_tone": "1f477-1f3fe-200d-2640-fe0f", + "female-construction-worker_dark_skin_tone": "1f477-1f3ff-200d-2640-fe0f", + "construction_worker_woman_dark_skin_tone": "1f477-1f3ff-200d-2640-fe0f", + "male-construction-worker_light_skin_tone": "1f477-1f3fb-200d-2642-fe0f", + "construction_worker_man_light_skin_tone": "1f477-1f3fb-200d-2642-fe0f", + "male-construction-worker_medium_light_skin_tone": "1f477-1f3fc-200d-2642-fe0f", + "construction_worker_man_medium_light_skin_tone": "1f477-1f3fc-200d-2642-fe0f", + "male-construction-worker_medium_skin_tone": "1f477-1f3fd-200d-2642-fe0f", + "construction_worker_man_medium_skin_tone": "1f477-1f3fd-200d-2642-fe0f", + "male-construction-worker_medium_dark_skin_tone": "1f477-1f3fe-200d-2642-fe0f", + "construction_worker_man_medium_dark_skin_tone": "1f477-1f3fe-200d-2642-fe0f", + "male-construction-worker_dark_skin_tone": "1f477-1f3ff-200d-2642-fe0f", + "construction_worker_man_dark_skin_tone": "1f477-1f3ff-200d-2642-fe0f", + "construction_worker_light_skin_tone": "1f477-1f3fb", + "construction_worker_medium_light_skin_tone": "1f477-1f3fc", + 
"construction_worker_medium_skin_tone": "1f477-1f3fd", + "construction_worker_medium_dark_skin_tone": "1f477-1f3fe", + "construction_worker_dark_skin_tone": "1f477-1f3ff", + "princess_light_skin_tone": "1f478-1f3fb", + "princess_medium_light_skin_tone": "1f478-1f3fc", + "princess_medium_skin_tone": "1f478-1f3fd", + "princess_medium_dark_skin_tone": "1f478-1f3fe", + "princess_dark_skin_tone": "1f478-1f3ff", + "angel_light_skin_tone": "1f47c-1f3fb", + "angel_medium_light_skin_tone": "1f47c-1f3fc", + "angel_medium_skin_tone": "1f47c-1f3fd", + "angel_medium_dark_skin_tone": "1f47c-1f3fe", + "angel_dark_skin_tone": "1f47c-1f3ff", + "woman-tipping-hand_light_skin_tone": "1f481-1f3fb-200d-2640-fe0f", + "tipping_hand_woman_light_skin_tone": "1f481-1f3fb-200d-2640-fe0f", + "woman-tipping-hand_medium_light_skin_tone": "1f481-1f3fc-200d-2640-fe0f", + "tipping_hand_woman_medium_light_skin_tone": "1f481-1f3fc-200d-2640-fe0f", + "woman-tipping-hand_medium_skin_tone": "1f481-1f3fd-200d-2640-fe0f", + "tipping_hand_woman_medium_skin_tone": "1f481-1f3fd-200d-2640-fe0f", + "woman-tipping-hand_medium_dark_skin_tone": "1f481-1f3fe-200d-2640-fe0f", + "tipping_hand_woman_medium_dark_skin_tone": "1f481-1f3fe-200d-2640-fe0f", + "woman-tipping-hand_dark_skin_tone": "1f481-1f3ff-200d-2640-fe0f", + "tipping_hand_woman_dark_skin_tone": "1f481-1f3ff-200d-2640-fe0f", + "man-tipping-hand_light_skin_tone": "1f481-1f3fb-200d-2642-fe0f", + "tipping_hand_man_light_skin_tone": "1f481-1f3fb-200d-2642-fe0f", + "man-tipping-hand_medium_light_skin_tone": "1f481-1f3fc-200d-2642-fe0f", + "tipping_hand_man_medium_light_skin_tone": "1f481-1f3fc-200d-2642-fe0f", + "man-tipping-hand_medium_skin_tone": "1f481-1f3fd-200d-2642-fe0f", + "tipping_hand_man_medium_skin_tone": "1f481-1f3fd-200d-2642-fe0f", + "man-tipping-hand_medium_dark_skin_tone": "1f481-1f3fe-200d-2642-fe0f", + "tipping_hand_man_medium_dark_skin_tone": "1f481-1f3fe-200d-2642-fe0f", + "man-tipping-hand_dark_skin_tone": "1f481-1f3ff-200d-2642-fe0f", + 
"tipping_hand_man_dark_skin_tone": "1f481-1f3ff-200d-2642-fe0f", + "information_desk_person_light_skin_tone": "1f481-1f3fb", + "information_desk_person_medium_light_skin_tone": "1f481-1f3fc", + "information_desk_person_medium_skin_tone": "1f481-1f3fd", + "information_desk_person_medium_dark_skin_tone": "1f481-1f3fe", + "information_desk_person_dark_skin_tone": "1f481-1f3ff", + "female-guard_light_skin_tone": "1f482-1f3fb-200d-2640-fe0f", + "guardswoman_light_skin_tone": "1f482-1f3fb-200d-2640-fe0f", + "female-guard_medium_light_skin_tone": "1f482-1f3fc-200d-2640-fe0f", + "guardswoman_medium_light_skin_tone": "1f482-1f3fc-200d-2640-fe0f", + "female-guard_medium_skin_tone": "1f482-1f3fd-200d-2640-fe0f", + "guardswoman_medium_skin_tone": "1f482-1f3fd-200d-2640-fe0f", + "female-guard_medium_dark_skin_tone": "1f482-1f3fe-200d-2640-fe0f", + "guardswoman_medium_dark_skin_tone": "1f482-1f3fe-200d-2640-fe0f", + "female-guard_dark_skin_tone": "1f482-1f3ff-200d-2640-fe0f", + "guardswoman_dark_skin_tone": "1f482-1f3ff-200d-2640-fe0f", + "male-guard_light_skin_tone": "1f482-1f3fb-200d-2642-fe0f", + "male-guard_medium_light_skin_tone": "1f482-1f3fc-200d-2642-fe0f", + "male-guard_medium_skin_tone": "1f482-1f3fd-200d-2642-fe0f", + "male-guard_medium_dark_skin_tone": "1f482-1f3fe-200d-2642-fe0f", + "male-guard_dark_skin_tone": "1f482-1f3ff-200d-2642-fe0f", + "guardsman_light_skin_tone": "1f482-1f3fb", + "guardsman_medium_light_skin_tone": "1f482-1f3fc", + "guardsman_medium_skin_tone": "1f482-1f3fd", + "guardsman_medium_dark_skin_tone": "1f482-1f3fe", + "guardsman_dark_skin_tone": "1f482-1f3ff", + "dancer_light_skin_tone": "1f483-1f3fb", + "dancer_medium_light_skin_tone": "1f483-1f3fc", + "dancer_medium_skin_tone": "1f483-1f3fd", + "dancer_medium_dark_skin_tone": "1f483-1f3fe", + "dancer_dark_skin_tone": "1f483-1f3ff", + "nail_care_light_skin_tone": "1f485-1f3fb", + "nail_care_medium_light_skin_tone": "1f485-1f3fc", + "nail_care_medium_skin_tone": "1f485-1f3fd", + 
"nail_care_medium_dark_skin_tone": "1f485-1f3fe", + "nail_care_dark_skin_tone": "1f485-1f3ff", + "woman-getting-massage_light_skin_tone": "1f486-1f3fb-200d-2640-fe0f", + "massage_woman_light_skin_tone": "1f486-1f3fb-200d-2640-fe0f", + "woman-getting-massage_medium_light_skin_tone": "1f486-1f3fc-200d-2640-fe0f", + "massage_woman_medium_light_skin_tone": "1f486-1f3fc-200d-2640-fe0f", + "woman-getting-massage_medium_skin_tone": "1f486-1f3fd-200d-2640-fe0f", + "massage_woman_medium_skin_tone": "1f486-1f3fd-200d-2640-fe0f", + "woman-getting-massage_medium_dark_skin_tone": "1f486-1f3fe-200d-2640-fe0f", + "massage_woman_medium_dark_skin_tone": "1f486-1f3fe-200d-2640-fe0f", + "woman-getting-massage_dark_skin_tone": "1f486-1f3ff-200d-2640-fe0f", + "massage_woman_dark_skin_tone": "1f486-1f3ff-200d-2640-fe0f", + "man-getting-massage_light_skin_tone": "1f486-1f3fb-200d-2642-fe0f", + "massage_man_light_skin_tone": "1f486-1f3fb-200d-2642-fe0f", + "man-getting-massage_medium_light_skin_tone": "1f486-1f3fc-200d-2642-fe0f", + "massage_man_medium_light_skin_tone": "1f486-1f3fc-200d-2642-fe0f", + "man-getting-massage_medium_skin_tone": "1f486-1f3fd-200d-2642-fe0f", + "massage_man_medium_skin_tone": "1f486-1f3fd-200d-2642-fe0f", + "man-getting-massage_medium_dark_skin_tone": "1f486-1f3fe-200d-2642-fe0f", + "massage_man_medium_dark_skin_tone": "1f486-1f3fe-200d-2642-fe0f", + "man-getting-massage_dark_skin_tone": "1f486-1f3ff-200d-2642-fe0f", + "massage_man_dark_skin_tone": "1f486-1f3ff-200d-2642-fe0f", + "massage_light_skin_tone": "1f486-1f3fb", + "massage_medium_light_skin_tone": "1f486-1f3fc", + "massage_medium_skin_tone": "1f486-1f3fd", + "massage_medium_dark_skin_tone": "1f486-1f3fe", + "massage_dark_skin_tone": "1f486-1f3ff", + "woman-getting-haircut_light_skin_tone": "1f487-1f3fb-200d-2640-fe0f", + "haircut_woman_light_skin_tone": "1f487-1f3fb-200d-2640-fe0f", + "woman-getting-haircut_medium_light_skin_tone": "1f487-1f3fc-200d-2640-fe0f", + "haircut_woman_medium_light_skin_tone": 
"1f487-1f3fc-200d-2640-fe0f", + "woman-getting-haircut_medium_skin_tone": "1f487-1f3fd-200d-2640-fe0f", + "haircut_woman_medium_skin_tone": "1f487-1f3fd-200d-2640-fe0f", + "woman-getting-haircut_medium_dark_skin_tone": "1f487-1f3fe-200d-2640-fe0f", + "haircut_woman_medium_dark_skin_tone": "1f487-1f3fe-200d-2640-fe0f", + "woman-getting-haircut_dark_skin_tone": "1f487-1f3ff-200d-2640-fe0f", + "haircut_woman_dark_skin_tone": "1f487-1f3ff-200d-2640-fe0f", + "man-getting-haircut_light_skin_tone": "1f487-1f3fb-200d-2642-fe0f", + "haircut_man_light_skin_tone": "1f487-1f3fb-200d-2642-fe0f", + "man-getting-haircut_medium_light_skin_tone": "1f487-1f3fc-200d-2642-fe0f", + "haircut_man_medium_light_skin_tone": "1f487-1f3fc-200d-2642-fe0f", + "man-getting-haircut_medium_skin_tone": "1f487-1f3fd-200d-2642-fe0f", + "haircut_man_medium_skin_tone": "1f487-1f3fd-200d-2642-fe0f", + "man-getting-haircut_medium_dark_skin_tone": "1f487-1f3fe-200d-2642-fe0f", + "haircut_man_medium_dark_skin_tone": "1f487-1f3fe-200d-2642-fe0f", + "man-getting-haircut_dark_skin_tone": "1f487-1f3ff-200d-2642-fe0f", + "haircut_man_dark_skin_tone": "1f487-1f3ff-200d-2642-fe0f", + "haircut_light_skin_tone": "1f487-1f3fb", + "haircut_medium_light_skin_tone": "1f487-1f3fc", + "haircut_medium_skin_tone": "1f487-1f3fd", + "haircut_medium_dark_skin_tone": "1f487-1f3fe", + "haircut_dark_skin_tone": "1f487-1f3ff", + "muscle_light_skin_tone": "1f4aa-1f3fb", + "muscle_medium_light_skin_tone": "1f4aa-1f3fc", + "muscle_medium_skin_tone": "1f4aa-1f3fd", + "muscle_medium_dark_skin_tone": "1f4aa-1f3fe", + "muscle_dark_skin_tone": "1f4aa-1f3ff", + "man_in_business_suit_levitating_light_skin_tone": "1f574-1f3fb", + "business_suit_levitating_light_skin_tone": "1f574-1f3fb", + "man_in_business_suit_levitating_medium_light_skin_tone": "1f574-1f3fc", + "business_suit_levitating_medium_light_skin_tone": "1f574-1f3fc", + "man_in_business_suit_levitating_medium_skin_tone": "1f574-1f3fd", + 
"business_suit_levitating_medium_skin_tone": "1f574-1f3fd", + "man_in_business_suit_levitating_medium_dark_skin_tone": "1f574-1f3fe", + "business_suit_levitating_medium_dark_skin_tone": "1f574-1f3fe", + "man_in_business_suit_levitating_dark_skin_tone": "1f574-1f3ff", + "business_suit_levitating_dark_skin_tone": "1f574-1f3ff", + "female-detective_light_skin_tone": "1f575-1f3fb-200d-2640-fe0f", + "female_detective_light_skin_tone": "1f575-1f3fb-200d-2640-fe0f", + "female-detective_medium_light_skin_tone": "1f575-1f3fc-200d-2640-fe0f", + "female_detective_medium_light_skin_tone": "1f575-1f3fc-200d-2640-fe0f", + "female-detective_medium_skin_tone": "1f575-1f3fd-200d-2640-fe0f", + "female_detective_medium_skin_tone": "1f575-1f3fd-200d-2640-fe0f", + "female-detective_medium_dark_skin_tone": "1f575-1f3fe-200d-2640-fe0f", + "female_detective_medium_dark_skin_tone": "1f575-1f3fe-200d-2640-fe0f", + "female-detective_dark_skin_tone": "1f575-1f3ff-200d-2640-fe0f", + "female_detective_dark_skin_tone": "1f575-1f3ff-200d-2640-fe0f", + "male-detective_light_skin_tone": "1f575-1f3fb-200d-2642-fe0f", + "male_detective_light_skin_tone": "1f575-1f3fb-200d-2642-fe0f", + "male-detective_medium_light_skin_tone": "1f575-1f3fc-200d-2642-fe0f", + "male_detective_medium_light_skin_tone": "1f575-1f3fc-200d-2642-fe0f", + "male-detective_medium_skin_tone": "1f575-1f3fd-200d-2642-fe0f", + "male_detective_medium_skin_tone": "1f575-1f3fd-200d-2642-fe0f", + "male-detective_medium_dark_skin_tone": "1f575-1f3fe-200d-2642-fe0f", + "male_detective_medium_dark_skin_tone": "1f575-1f3fe-200d-2642-fe0f", + "male-detective_dark_skin_tone": "1f575-1f3ff-200d-2642-fe0f", + "male_detective_dark_skin_tone": "1f575-1f3ff-200d-2642-fe0f", + "sleuth_or_spy_light_skin_tone": "1f575-1f3fb", + "sleuth_or_spy_medium_light_skin_tone": "1f575-1f3fc", + "sleuth_or_spy_medium_skin_tone": "1f575-1f3fd", + "sleuth_or_spy_medium_dark_skin_tone": "1f575-1f3fe", + "sleuth_or_spy_dark_skin_tone": "1f575-1f3ff", + 
"man_dancing_light_skin_tone": "1f57a-1f3fb", + "man_dancing_medium_light_skin_tone": "1f57a-1f3fc", + "man_dancing_medium_skin_tone": "1f57a-1f3fd", + "man_dancing_medium_dark_skin_tone": "1f57a-1f3fe", + "man_dancing_dark_skin_tone": "1f57a-1f3ff", + "raised_hand_with_fingers_splayed_light_skin_tone": "1f590-1f3fb", + "raised_hand_with_fingers_splayed_medium_light_skin_tone": "1f590-1f3fc", + "raised_hand_with_fingers_splayed_medium_skin_tone": "1f590-1f3fd", + "raised_hand_with_fingers_splayed_medium_dark_skin_tone": "1f590-1f3fe", + "raised_hand_with_fingers_splayed_dark_skin_tone": "1f590-1f3ff", + "middle_finger_light_skin_tone": "1f595-1f3fb", + "reversed_hand_with_middle_finger_extended_light_skin_tone": "1f595-1f3fb", + "middle_finger_medium_light_skin_tone": "1f595-1f3fc", + "reversed_hand_with_middle_finger_extended_medium_light_skin_tone": "1f595-1f3fc", + "middle_finger_medium_skin_tone": "1f595-1f3fd", + "reversed_hand_with_middle_finger_extended_medium_skin_tone": "1f595-1f3fd", + "middle_finger_medium_dark_skin_tone": "1f595-1f3fe", + "reversed_hand_with_middle_finger_extended_medium_dark_skin_tone": "1f595-1f3fe", + "middle_finger_dark_skin_tone": "1f595-1f3ff", + "reversed_hand_with_middle_finger_extended_dark_skin_tone": "1f595-1f3ff", + "spock-hand_light_skin_tone": "1f596-1f3fb", + "vulcan_salute_light_skin_tone": "1f596-1f3fb", + "spock-hand_medium_light_skin_tone": "1f596-1f3fc", + "vulcan_salute_medium_light_skin_tone": "1f596-1f3fc", + "spock-hand_medium_skin_tone": "1f596-1f3fd", + "vulcan_salute_medium_skin_tone": "1f596-1f3fd", + "spock-hand_medium_dark_skin_tone": "1f596-1f3fe", + "vulcan_salute_medium_dark_skin_tone": "1f596-1f3fe", + "spock-hand_dark_skin_tone": "1f596-1f3ff", + "vulcan_salute_dark_skin_tone": "1f596-1f3ff", + "woman-gesturing-no_light_skin_tone": "1f645-1f3fb-200d-2640-fe0f", + "no_good_woman_light_skin_tone": "1f645-1f3fb-200d-2640-fe0f", + "woman-gesturing-no_medium_light_skin_tone": "1f645-1f3fc-200d-2640-fe0f", + 
"no_good_woman_medium_light_skin_tone": "1f645-1f3fc-200d-2640-fe0f", + "woman-gesturing-no_medium_skin_tone": "1f645-1f3fd-200d-2640-fe0f", + "no_good_woman_medium_skin_tone": "1f645-1f3fd-200d-2640-fe0f", + "woman-gesturing-no_medium_dark_skin_tone": "1f645-1f3fe-200d-2640-fe0f", + "no_good_woman_medium_dark_skin_tone": "1f645-1f3fe-200d-2640-fe0f", + "woman-gesturing-no_dark_skin_tone": "1f645-1f3ff-200d-2640-fe0f", + "no_good_woman_dark_skin_tone": "1f645-1f3ff-200d-2640-fe0f", + "man-gesturing-no_light_skin_tone": "1f645-1f3fb-200d-2642-fe0f", + "no_good_man_light_skin_tone": "1f645-1f3fb-200d-2642-fe0f", + "man-gesturing-no_medium_light_skin_tone": "1f645-1f3fc-200d-2642-fe0f", + "no_good_man_medium_light_skin_tone": "1f645-1f3fc-200d-2642-fe0f", + "man-gesturing-no_medium_skin_tone": "1f645-1f3fd-200d-2642-fe0f", + "no_good_man_medium_skin_tone": "1f645-1f3fd-200d-2642-fe0f", + "man-gesturing-no_medium_dark_skin_tone": "1f645-1f3fe-200d-2642-fe0f", + "no_good_man_medium_dark_skin_tone": "1f645-1f3fe-200d-2642-fe0f", + "man-gesturing-no_dark_skin_tone": "1f645-1f3ff-200d-2642-fe0f", + "no_good_man_dark_skin_tone": "1f645-1f3ff-200d-2642-fe0f", + "no_good_light_skin_tone": "1f645-1f3fb", + "no_good_medium_light_skin_tone": "1f645-1f3fc", + "no_good_medium_skin_tone": "1f645-1f3fd", + "no_good_medium_dark_skin_tone": "1f645-1f3fe", + "no_good_dark_skin_tone": "1f645-1f3ff", + "woman-gesturing-ok_light_skin_tone": "1f646-1f3fb-200d-2640-fe0f", + "woman-gesturing-ok_medium_light_skin_tone": "1f646-1f3fc-200d-2640-fe0f", + "woman-gesturing-ok_medium_skin_tone": "1f646-1f3fd-200d-2640-fe0f", + "woman-gesturing-ok_medium_dark_skin_tone": "1f646-1f3fe-200d-2640-fe0f", + "woman-gesturing-ok_dark_skin_tone": "1f646-1f3ff-200d-2640-fe0f", + "man-gesturing-ok_light_skin_tone": "1f646-1f3fb-200d-2642-fe0f", + "ok_man_light_skin_tone": "1f646-1f3fb-200d-2642-fe0f", + "man-gesturing-ok_medium_light_skin_tone": "1f646-1f3fc-200d-2642-fe0f", + "ok_man_medium_light_skin_tone": 
"1f646-1f3fc-200d-2642-fe0f", + "man-gesturing-ok_medium_skin_tone": "1f646-1f3fd-200d-2642-fe0f", + "ok_man_medium_skin_tone": "1f646-1f3fd-200d-2642-fe0f", + "man-gesturing-ok_medium_dark_skin_tone": "1f646-1f3fe-200d-2642-fe0f", + "ok_man_medium_dark_skin_tone": "1f646-1f3fe-200d-2642-fe0f", + "man-gesturing-ok_dark_skin_tone": "1f646-1f3ff-200d-2642-fe0f", + "ok_man_dark_skin_tone": "1f646-1f3ff-200d-2642-fe0f", + "ok_woman_light_skin_tone": "1f646-1f3fb", + "ok_woman_medium_light_skin_tone": "1f646-1f3fc", + "ok_woman_medium_skin_tone": "1f646-1f3fd", + "ok_woman_medium_dark_skin_tone": "1f646-1f3fe", + "ok_woman_dark_skin_tone": "1f646-1f3ff", + "woman-bowing_light_skin_tone": "1f647-1f3fb-200d-2640-fe0f", + "bowing_woman_light_skin_tone": "1f647-1f3fb-200d-2640-fe0f", + "woman-bowing_medium_light_skin_tone": "1f647-1f3fc-200d-2640-fe0f", + "bowing_woman_medium_light_skin_tone": "1f647-1f3fc-200d-2640-fe0f", + "woman-bowing_medium_skin_tone": "1f647-1f3fd-200d-2640-fe0f", + "bowing_woman_medium_skin_tone": "1f647-1f3fd-200d-2640-fe0f", + "woman-bowing_medium_dark_skin_tone": "1f647-1f3fe-200d-2640-fe0f", + "bowing_woman_medium_dark_skin_tone": "1f647-1f3fe-200d-2640-fe0f", + "woman-bowing_dark_skin_tone": "1f647-1f3ff-200d-2640-fe0f", + "bowing_woman_dark_skin_tone": "1f647-1f3ff-200d-2640-fe0f", + "man-bowing_light_skin_tone": "1f647-1f3fb-200d-2642-fe0f", + "bowing_man_light_skin_tone": "1f647-1f3fb-200d-2642-fe0f", + "man-bowing_medium_light_skin_tone": "1f647-1f3fc-200d-2642-fe0f", + "bowing_man_medium_light_skin_tone": "1f647-1f3fc-200d-2642-fe0f", + "man-bowing_medium_skin_tone": "1f647-1f3fd-200d-2642-fe0f", + "bowing_man_medium_skin_tone": "1f647-1f3fd-200d-2642-fe0f", + "man-bowing_medium_dark_skin_tone": "1f647-1f3fe-200d-2642-fe0f", + "bowing_man_medium_dark_skin_tone": "1f647-1f3fe-200d-2642-fe0f", + "man-bowing_dark_skin_tone": "1f647-1f3ff-200d-2642-fe0f", + "bowing_man_dark_skin_tone": "1f647-1f3ff-200d-2642-fe0f", + "bow_light_skin_tone": 
"1f647-1f3fb", + "bow_medium_light_skin_tone": "1f647-1f3fc", + "bow_medium_skin_tone": "1f647-1f3fd", + "bow_medium_dark_skin_tone": "1f647-1f3fe", + "bow_dark_skin_tone": "1f647-1f3ff", + "woman-raising-hand_light_skin_tone": "1f64b-1f3fb-200d-2640-fe0f", + "raising_hand_woman_light_skin_tone": "1f64b-1f3fb-200d-2640-fe0f", + "woman-raising-hand_medium_light_skin_tone": "1f64b-1f3fc-200d-2640-fe0f", + "raising_hand_woman_medium_light_skin_tone": "1f64b-1f3fc-200d-2640-fe0f", + "woman-raising-hand_medium_skin_tone": "1f64b-1f3fd-200d-2640-fe0f", + "raising_hand_woman_medium_skin_tone": "1f64b-1f3fd-200d-2640-fe0f", + "woman-raising-hand_medium_dark_skin_tone": "1f64b-1f3fe-200d-2640-fe0f", + "raising_hand_woman_medium_dark_skin_tone": "1f64b-1f3fe-200d-2640-fe0f", + "woman-raising-hand_dark_skin_tone": "1f64b-1f3ff-200d-2640-fe0f", + "raising_hand_woman_dark_skin_tone": "1f64b-1f3ff-200d-2640-fe0f", + "man-raising-hand_light_skin_tone": "1f64b-1f3fb-200d-2642-fe0f", + "raising_hand_man_light_skin_tone": "1f64b-1f3fb-200d-2642-fe0f", + "man-raising-hand_medium_light_skin_tone": "1f64b-1f3fc-200d-2642-fe0f", + "raising_hand_man_medium_light_skin_tone": "1f64b-1f3fc-200d-2642-fe0f", + "man-raising-hand_medium_skin_tone": "1f64b-1f3fd-200d-2642-fe0f", + "raising_hand_man_medium_skin_tone": "1f64b-1f3fd-200d-2642-fe0f", + "man-raising-hand_medium_dark_skin_tone": "1f64b-1f3fe-200d-2642-fe0f", + "raising_hand_man_medium_dark_skin_tone": "1f64b-1f3fe-200d-2642-fe0f", + "man-raising-hand_dark_skin_tone": "1f64b-1f3ff-200d-2642-fe0f", + "raising_hand_man_dark_skin_tone": "1f64b-1f3ff-200d-2642-fe0f", + "raising_hand_light_skin_tone": "1f64b-1f3fb", + "raising_hand_medium_light_skin_tone": "1f64b-1f3fc", + "raising_hand_medium_skin_tone": "1f64b-1f3fd", + "raising_hand_medium_dark_skin_tone": "1f64b-1f3fe", + "raising_hand_dark_skin_tone": "1f64b-1f3ff", + "raised_hands_light_skin_tone": "1f64c-1f3fb", + "raised_hands_medium_light_skin_tone": "1f64c-1f3fc", + 
"raised_hands_medium_skin_tone": "1f64c-1f3fd", + "raised_hands_medium_dark_skin_tone": "1f64c-1f3fe", + "raised_hands_dark_skin_tone": "1f64c-1f3ff", + "woman-frowning_light_skin_tone": "1f64d-1f3fb-200d-2640-fe0f", + "frowning_woman_light_skin_tone": "1f64d-1f3fb-200d-2640-fe0f", + "woman-frowning_medium_light_skin_tone": "1f64d-1f3fc-200d-2640-fe0f", + "frowning_woman_medium_light_skin_tone": "1f64d-1f3fc-200d-2640-fe0f", + "woman-frowning_medium_skin_tone": "1f64d-1f3fd-200d-2640-fe0f", + "frowning_woman_medium_skin_tone": "1f64d-1f3fd-200d-2640-fe0f", + "woman-frowning_medium_dark_skin_tone": "1f64d-1f3fe-200d-2640-fe0f", + "frowning_woman_medium_dark_skin_tone": "1f64d-1f3fe-200d-2640-fe0f", + "woman-frowning_dark_skin_tone": "1f64d-1f3ff-200d-2640-fe0f", + "frowning_woman_dark_skin_tone": "1f64d-1f3ff-200d-2640-fe0f", + "man-frowning_light_skin_tone": "1f64d-1f3fb-200d-2642-fe0f", + "frowning_man_light_skin_tone": "1f64d-1f3fb-200d-2642-fe0f", + "man-frowning_medium_light_skin_tone": "1f64d-1f3fc-200d-2642-fe0f", + "frowning_man_medium_light_skin_tone": "1f64d-1f3fc-200d-2642-fe0f", + "man-frowning_medium_skin_tone": "1f64d-1f3fd-200d-2642-fe0f", + "frowning_man_medium_skin_tone": "1f64d-1f3fd-200d-2642-fe0f", + "man-frowning_medium_dark_skin_tone": "1f64d-1f3fe-200d-2642-fe0f", + "frowning_man_medium_dark_skin_tone": "1f64d-1f3fe-200d-2642-fe0f", + "man-frowning_dark_skin_tone": "1f64d-1f3ff-200d-2642-fe0f", + "frowning_man_dark_skin_tone": "1f64d-1f3ff-200d-2642-fe0f", + "person_frowning_light_skin_tone": "1f64d-1f3fb", + "person_frowning_medium_light_skin_tone": "1f64d-1f3fc", + "person_frowning_medium_skin_tone": "1f64d-1f3fd", + "person_frowning_medium_dark_skin_tone": "1f64d-1f3fe", + "person_frowning_dark_skin_tone": "1f64d-1f3ff", + "woman-pouting_light_skin_tone": "1f64e-1f3fb-200d-2640-fe0f", + "pouting_woman_light_skin_tone": "1f64e-1f3fb-200d-2640-fe0f", + "woman-pouting_medium_light_skin_tone": "1f64e-1f3fc-200d-2640-fe0f", + 
"pouting_woman_medium_light_skin_tone": "1f64e-1f3fc-200d-2640-fe0f", + "woman-pouting_medium_skin_tone": "1f64e-1f3fd-200d-2640-fe0f", + "pouting_woman_medium_skin_tone": "1f64e-1f3fd-200d-2640-fe0f", + "woman-pouting_medium_dark_skin_tone": "1f64e-1f3fe-200d-2640-fe0f", + "pouting_woman_medium_dark_skin_tone": "1f64e-1f3fe-200d-2640-fe0f", + "woman-pouting_dark_skin_tone": "1f64e-1f3ff-200d-2640-fe0f", + "pouting_woman_dark_skin_tone": "1f64e-1f3ff-200d-2640-fe0f", + "man-pouting_light_skin_tone": "1f64e-1f3fb-200d-2642-fe0f", + "pouting_man_light_skin_tone": "1f64e-1f3fb-200d-2642-fe0f", + "man-pouting_medium_light_skin_tone": "1f64e-1f3fc-200d-2642-fe0f", + "pouting_man_medium_light_skin_tone": "1f64e-1f3fc-200d-2642-fe0f", + "man-pouting_medium_skin_tone": "1f64e-1f3fd-200d-2642-fe0f", + "pouting_man_medium_skin_tone": "1f64e-1f3fd-200d-2642-fe0f", + "man-pouting_medium_dark_skin_tone": "1f64e-1f3fe-200d-2642-fe0f", + "pouting_man_medium_dark_skin_tone": "1f64e-1f3fe-200d-2642-fe0f", + "man-pouting_dark_skin_tone": "1f64e-1f3ff-200d-2642-fe0f", + "pouting_man_dark_skin_tone": "1f64e-1f3ff-200d-2642-fe0f", + "person_with_pouting_face_light_skin_tone": "1f64e-1f3fb", + "person_with_pouting_face_medium_light_skin_tone": "1f64e-1f3fc", + "person_with_pouting_face_medium_skin_tone": "1f64e-1f3fd", + "person_with_pouting_face_medium_dark_skin_tone": "1f64e-1f3fe", + "person_with_pouting_face_dark_skin_tone": "1f64e-1f3ff", + "pray_light_skin_tone": "1f64f-1f3fb", + "pray_medium_light_skin_tone": "1f64f-1f3fc", + "pray_medium_skin_tone": "1f64f-1f3fd", + "pray_medium_dark_skin_tone": "1f64f-1f3fe", + "pray_dark_skin_tone": "1f64f-1f3ff", + "woman-rowing-boat_light_skin_tone": "1f6a3-1f3fb-200d-2640-fe0f", + "rowing_woman_light_skin_tone": "1f6a3-1f3fb-200d-2640-fe0f", + "woman-rowing-boat_medium_light_skin_tone": "1f6a3-1f3fc-200d-2640-fe0f", + "rowing_woman_medium_light_skin_tone": "1f6a3-1f3fc-200d-2640-fe0f", + "woman-rowing-boat_medium_skin_tone": 
"1f6a3-1f3fd-200d-2640-fe0f", + "rowing_woman_medium_skin_tone": "1f6a3-1f3fd-200d-2640-fe0f", + "woman-rowing-boat_medium_dark_skin_tone": "1f6a3-1f3fe-200d-2640-fe0f", + "rowing_woman_medium_dark_skin_tone": "1f6a3-1f3fe-200d-2640-fe0f", + "woman-rowing-boat_dark_skin_tone": "1f6a3-1f3ff-200d-2640-fe0f", + "rowing_woman_dark_skin_tone": "1f6a3-1f3ff-200d-2640-fe0f", + "man-rowing-boat_light_skin_tone": "1f6a3-1f3fb-200d-2642-fe0f", + "rowing_man_light_skin_tone": "1f6a3-1f3fb-200d-2642-fe0f", + "man-rowing-boat_medium_light_skin_tone": "1f6a3-1f3fc-200d-2642-fe0f", + "rowing_man_medium_light_skin_tone": "1f6a3-1f3fc-200d-2642-fe0f", + "man-rowing-boat_medium_skin_tone": "1f6a3-1f3fd-200d-2642-fe0f", + "rowing_man_medium_skin_tone": "1f6a3-1f3fd-200d-2642-fe0f", + "man-rowing-boat_medium_dark_skin_tone": "1f6a3-1f3fe-200d-2642-fe0f", + "rowing_man_medium_dark_skin_tone": "1f6a3-1f3fe-200d-2642-fe0f", + "man-rowing-boat_dark_skin_tone": "1f6a3-1f3ff-200d-2642-fe0f", + "rowing_man_dark_skin_tone": "1f6a3-1f3ff-200d-2642-fe0f", + "rowboat_light_skin_tone": "1f6a3-1f3fb", + "rowboat_medium_light_skin_tone": "1f6a3-1f3fc", + "rowboat_medium_skin_tone": "1f6a3-1f3fd", + "rowboat_medium_dark_skin_tone": "1f6a3-1f3fe", + "rowboat_dark_skin_tone": "1f6a3-1f3ff", + "woman-biking_light_skin_tone": "1f6b4-1f3fb-200d-2640-fe0f", + "biking_woman_light_skin_tone": "1f6b4-1f3fb-200d-2640-fe0f", + "woman-biking_medium_light_skin_tone": "1f6b4-1f3fc-200d-2640-fe0f", + "biking_woman_medium_light_skin_tone": "1f6b4-1f3fc-200d-2640-fe0f", + "woman-biking_medium_skin_tone": "1f6b4-1f3fd-200d-2640-fe0f", + "biking_woman_medium_skin_tone": "1f6b4-1f3fd-200d-2640-fe0f", + "woman-biking_medium_dark_skin_tone": "1f6b4-1f3fe-200d-2640-fe0f", + "biking_woman_medium_dark_skin_tone": "1f6b4-1f3fe-200d-2640-fe0f", + "woman-biking_dark_skin_tone": "1f6b4-1f3ff-200d-2640-fe0f", + "biking_woman_dark_skin_tone": "1f6b4-1f3ff-200d-2640-fe0f", + "man-biking_light_skin_tone": 
"1f6b4-1f3fb-200d-2642-fe0f", + "biking_man_light_skin_tone": "1f6b4-1f3fb-200d-2642-fe0f", + "man-biking_medium_light_skin_tone": "1f6b4-1f3fc-200d-2642-fe0f", + "biking_man_medium_light_skin_tone": "1f6b4-1f3fc-200d-2642-fe0f", + "man-biking_medium_skin_tone": "1f6b4-1f3fd-200d-2642-fe0f", + "biking_man_medium_skin_tone": "1f6b4-1f3fd-200d-2642-fe0f", + "man-biking_medium_dark_skin_tone": "1f6b4-1f3fe-200d-2642-fe0f", + "biking_man_medium_dark_skin_tone": "1f6b4-1f3fe-200d-2642-fe0f", + "man-biking_dark_skin_tone": "1f6b4-1f3ff-200d-2642-fe0f", + "biking_man_dark_skin_tone": "1f6b4-1f3ff-200d-2642-fe0f", + "bicyclist_light_skin_tone": "1f6b4-1f3fb", + "bicyclist_medium_light_skin_tone": "1f6b4-1f3fc", + "bicyclist_medium_skin_tone": "1f6b4-1f3fd", + "bicyclist_medium_dark_skin_tone": "1f6b4-1f3fe", + "bicyclist_dark_skin_tone": "1f6b4-1f3ff", + "woman-mountain-biking_light_skin_tone": "1f6b5-1f3fb-200d-2640-fe0f", + "mountain_biking_woman_light_skin_tone": "1f6b5-1f3fb-200d-2640-fe0f", + "woman-mountain-biking_medium_light_skin_tone": "1f6b5-1f3fc-200d-2640-fe0f", + "mountain_biking_woman_medium_light_skin_tone": "1f6b5-1f3fc-200d-2640-fe0f", + "woman-mountain-biking_medium_skin_tone": "1f6b5-1f3fd-200d-2640-fe0f", + "mountain_biking_woman_medium_skin_tone": "1f6b5-1f3fd-200d-2640-fe0f", + "woman-mountain-biking_medium_dark_skin_tone": "1f6b5-1f3fe-200d-2640-fe0f", + "mountain_biking_woman_medium_dark_skin_tone": "1f6b5-1f3fe-200d-2640-fe0f", + "woman-mountain-biking_dark_skin_tone": "1f6b5-1f3ff-200d-2640-fe0f", + "mountain_biking_woman_dark_skin_tone": "1f6b5-1f3ff-200d-2640-fe0f", + "man-mountain-biking_light_skin_tone": "1f6b5-1f3fb-200d-2642-fe0f", + "mountain_biking_man_light_skin_tone": "1f6b5-1f3fb-200d-2642-fe0f", + "man-mountain-biking_medium_light_skin_tone": "1f6b5-1f3fc-200d-2642-fe0f", + "mountain_biking_man_medium_light_skin_tone": "1f6b5-1f3fc-200d-2642-fe0f", + "man-mountain-biking_medium_skin_tone": "1f6b5-1f3fd-200d-2642-fe0f", + 
"mountain_biking_man_medium_skin_tone": "1f6b5-1f3fd-200d-2642-fe0f", + "man-mountain-biking_medium_dark_skin_tone": "1f6b5-1f3fe-200d-2642-fe0f", + "mountain_biking_man_medium_dark_skin_tone": "1f6b5-1f3fe-200d-2642-fe0f", + "man-mountain-biking_dark_skin_tone": "1f6b5-1f3ff-200d-2642-fe0f", + "mountain_biking_man_dark_skin_tone": "1f6b5-1f3ff-200d-2642-fe0f", + "mountain_bicyclist_light_skin_tone": "1f6b5-1f3fb", + "mountain_bicyclist_medium_light_skin_tone": "1f6b5-1f3fc", + "mountain_bicyclist_medium_skin_tone": "1f6b5-1f3fd", + "mountain_bicyclist_medium_dark_skin_tone": "1f6b5-1f3fe", + "mountain_bicyclist_dark_skin_tone": "1f6b5-1f3ff", + "woman-walking_light_skin_tone": "1f6b6-1f3fb-200d-2640-fe0f", + "walking_woman_light_skin_tone": "1f6b6-1f3fb-200d-2640-fe0f", + "woman-walking_medium_light_skin_tone": "1f6b6-1f3fc-200d-2640-fe0f", + "walking_woman_medium_light_skin_tone": "1f6b6-1f3fc-200d-2640-fe0f", + "woman-walking_medium_skin_tone": "1f6b6-1f3fd-200d-2640-fe0f", + "walking_woman_medium_skin_tone": "1f6b6-1f3fd-200d-2640-fe0f", + "woman-walking_medium_dark_skin_tone": "1f6b6-1f3fe-200d-2640-fe0f", + "walking_woman_medium_dark_skin_tone": "1f6b6-1f3fe-200d-2640-fe0f", + "woman-walking_dark_skin_tone": "1f6b6-1f3ff-200d-2640-fe0f", + "walking_woman_dark_skin_tone": "1f6b6-1f3ff-200d-2640-fe0f", + "man-walking_light_skin_tone": "1f6b6-1f3fb-200d-2642-fe0f", + "walking_man_light_skin_tone": "1f6b6-1f3fb-200d-2642-fe0f", + "man-walking_medium_light_skin_tone": "1f6b6-1f3fc-200d-2642-fe0f", + "walking_man_medium_light_skin_tone": "1f6b6-1f3fc-200d-2642-fe0f", + "man-walking_medium_skin_tone": "1f6b6-1f3fd-200d-2642-fe0f", + "walking_man_medium_skin_tone": "1f6b6-1f3fd-200d-2642-fe0f", + "man-walking_medium_dark_skin_tone": "1f6b6-1f3fe-200d-2642-fe0f", + "walking_man_medium_dark_skin_tone": "1f6b6-1f3fe-200d-2642-fe0f", + "man-walking_dark_skin_tone": "1f6b6-1f3ff-200d-2642-fe0f", + "walking_man_dark_skin_tone": "1f6b6-1f3ff-200d-2642-fe0f", + 
"walking_light_skin_tone": "1f6b6-1f3fb", + "walking_medium_light_skin_tone": "1f6b6-1f3fc", + "walking_medium_skin_tone": "1f6b6-1f3fd", + "walking_medium_dark_skin_tone": "1f6b6-1f3fe", + "walking_dark_skin_tone": "1f6b6-1f3ff", + "bath_light_skin_tone": "1f6c0-1f3fb", + "bath_medium_light_skin_tone": "1f6c0-1f3fc", + "bath_medium_skin_tone": "1f6c0-1f3fd", + "bath_medium_dark_skin_tone": "1f6c0-1f3fe", + "bath_dark_skin_tone": "1f6c0-1f3ff", + "sleeping_accommodation_light_skin_tone": "1f6cc-1f3fb", + "sleeping_accommodation_medium_light_skin_tone": "1f6cc-1f3fc", + "sleeping_accommodation_medium_skin_tone": "1f6cc-1f3fd", + "sleeping_accommodation_medium_dark_skin_tone": "1f6cc-1f3fe", + "sleeping_accommodation_dark_skin_tone": "1f6cc-1f3ff", + "pinched_fingers_light_skin_tone": "1f90c-1f3fb", + "pinched_fingers_medium_light_skin_tone": "1f90c-1f3fc", + "pinched_fingers_medium_skin_tone": "1f90c-1f3fd", + "pinched_fingers_medium_dark_skin_tone": "1f90c-1f3fe", + "pinched_fingers_dark_skin_tone": "1f90c-1f3ff", + "pinching_hand_light_skin_tone": "1f90f-1f3fb", + "pinching_hand_medium_light_skin_tone": "1f90f-1f3fc", + "pinching_hand_medium_skin_tone": "1f90f-1f3fd", + "pinching_hand_medium_dark_skin_tone": "1f90f-1f3fe", + "pinching_hand_dark_skin_tone": "1f90f-1f3ff", + "the_horns_light_skin_tone": "1f918-1f3fb", + "sign_of_the_horns_light_skin_tone": "1f918-1f3fb", + "metal_light_skin_tone": "1f918-1f3fb", + "the_horns_medium_light_skin_tone": "1f918-1f3fc", + "sign_of_the_horns_medium_light_skin_tone": "1f918-1f3fc", + "metal_medium_light_skin_tone": "1f918-1f3fc", + "the_horns_medium_skin_tone": "1f918-1f3fd", + "sign_of_the_horns_medium_skin_tone": "1f918-1f3fd", + "metal_medium_skin_tone": "1f918-1f3fd", + "the_horns_medium_dark_skin_tone": "1f918-1f3fe", + "sign_of_the_horns_medium_dark_skin_tone": "1f918-1f3fe", + "metal_medium_dark_skin_tone": "1f918-1f3fe", + "the_horns_dark_skin_tone": "1f918-1f3ff", + "sign_of_the_horns_dark_skin_tone": 
"1f918-1f3ff", + "metal_dark_skin_tone": "1f918-1f3ff", + "call_me_hand_light_skin_tone": "1f919-1f3fb", + "call_me_hand_medium_light_skin_tone": "1f919-1f3fc", + "call_me_hand_medium_skin_tone": "1f919-1f3fd", + "call_me_hand_medium_dark_skin_tone": "1f919-1f3fe", + "call_me_hand_dark_skin_tone": "1f919-1f3ff", + "raised_back_of_hand_light_skin_tone": "1f91a-1f3fb", + "raised_back_of_hand_medium_light_skin_tone": "1f91a-1f3fc", + "raised_back_of_hand_medium_skin_tone": "1f91a-1f3fd", + "raised_back_of_hand_medium_dark_skin_tone": "1f91a-1f3fe", + "raised_back_of_hand_dark_skin_tone": "1f91a-1f3ff", + "left-facing_fist_light_skin_tone": "1f91b-1f3fb", + "fist_left_light_skin_tone": "1f91b-1f3fb", + "left-facing_fist_medium_light_skin_tone": "1f91b-1f3fc", + "fist_left_medium_light_skin_tone": "1f91b-1f3fc", + "left-facing_fist_medium_skin_tone": "1f91b-1f3fd", + "fist_left_medium_skin_tone": "1f91b-1f3fd", + "left-facing_fist_medium_dark_skin_tone": "1f91b-1f3fe", + "fist_left_medium_dark_skin_tone": "1f91b-1f3fe", + "left-facing_fist_dark_skin_tone": "1f91b-1f3ff", + "fist_left_dark_skin_tone": "1f91b-1f3ff", + "right-facing_fist_light_skin_tone": "1f91c-1f3fb", + "fist_right_light_skin_tone": "1f91c-1f3fb", + "right-facing_fist_medium_light_skin_tone": "1f91c-1f3fc", + "fist_right_medium_light_skin_tone": "1f91c-1f3fc", + "right-facing_fist_medium_skin_tone": "1f91c-1f3fd", + "fist_right_medium_skin_tone": "1f91c-1f3fd", + "right-facing_fist_medium_dark_skin_tone": "1f91c-1f3fe", + "fist_right_medium_dark_skin_tone": "1f91c-1f3fe", + "right-facing_fist_dark_skin_tone": "1f91c-1f3ff", + "fist_right_dark_skin_tone": "1f91c-1f3ff", + "crossed_fingers_light_skin_tone": "1f91e-1f3fb", + "hand_with_index_and_middle_fingers_crossed_light_skin_tone": "1f91e-1f3fb", + "crossed_fingers_medium_light_skin_tone": "1f91e-1f3fc", + "hand_with_index_and_middle_fingers_crossed_medium_light_skin_tone": "1f91e-1f3fc", + "crossed_fingers_medium_skin_tone": "1f91e-1f3fd", + 
"hand_with_index_and_middle_fingers_crossed_medium_skin_tone": "1f91e-1f3fd", + "crossed_fingers_medium_dark_skin_tone": "1f91e-1f3fe", + "hand_with_index_and_middle_fingers_crossed_medium_dark_skin_tone": "1f91e-1f3fe", + "crossed_fingers_dark_skin_tone": "1f91e-1f3ff", + "hand_with_index_and_middle_fingers_crossed_dark_skin_tone": "1f91e-1f3ff", + "i_love_you_hand_sign_light_skin_tone": "1f91f-1f3fb", + "i_love_you_hand_sign_medium_light_skin_tone": "1f91f-1f3fc", + "i_love_you_hand_sign_medium_skin_tone": "1f91f-1f3fd", + "i_love_you_hand_sign_medium_dark_skin_tone": "1f91f-1f3fe", + "i_love_you_hand_sign_dark_skin_tone": "1f91f-1f3ff", + "woman-facepalming_light_skin_tone": "1f926-1f3fb-200d-2640-fe0f", + "woman_facepalming_light_skin_tone": "1f926-1f3fb-200d-2640-fe0f", + "woman-facepalming_medium_light_skin_tone": "1f926-1f3fc-200d-2640-fe0f", + "woman_facepalming_medium_light_skin_tone": "1f926-1f3fc-200d-2640-fe0f", + "woman-facepalming_medium_skin_tone": "1f926-1f3fd-200d-2640-fe0f", + "woman_facepalming_medium_skin_tone": "1f926-1f3fd-200d-2640-fe0f", + "woman-facepalming_medium_dark_skin_tone": "1f926-1f3fe-200d-2640-fe0f", + "woman_facepalming_medium_dark_skin_tone": "1f926-1f3fe-200d-2640-fe0f", + "woman-facepalming_dark_skin_tone": "1f926-1f3ff-200d-2640-fe0f", + "woman_facepalming_dark_skin_tone": "1f926-1f3ff-200d-2640-fe0f", + "man-facepalming_light_skin_tone": "1f926-1f3fb-200d-2642-fe0f", + "man_facepalming_light_skin_tone": "1f926-1f3fb-200d-2642-fe0f", + "man-facepalming_medium_light_skin_tone": "1f926-1f3fc-200d-2642-fe0f", + "man_facepalming_medium_light_skin_tone": "1f926-1f3fc-200d-2642-fe0f", + "man-facepalming_medium_skin_tone": "1f926-1f3fd-200d-2642-fe0f", + "man_facepalming_medium_skin_tone": "1f926-1f3fd-200d-2642-fe0f", + "man-facepalming_medium_dark_skin_tone": "1f926-1f3fe-200d-2642-fe0f", + "man_facepalming_medium_dark_skin_tone": "1f926-1f3fe-200d-2642-fe0f", + "man-facepalming_dark_skin_tone": "1f926-1f3ff-200d-2642-fe0f", + 
"man_facepalming_dark_skin_tone": "1f926-1f3ff-200d-2642-fe0f", + "face_palm_light_skin_tone": "1f926-1f3fb", + "face_palm_medium_light_skin_tone": "1f926-1f3fc", + "face_palm_medium_skin_tone": "1f926-1f3fd", + "face_palm_medium_dark_skin_tone": "1f926-1f3fe", + "face_palm_dark_skin_tone": "1f926-1f3ff", + "pregnant_woman_light_skin_tone": "1f930-1f3fb", + "pregnant_woman_medium_light_skin_tone": "1f930-1f3fc", + "pregnant_woman_medium_skin_tone": "1f930-1f3fd", + "pregnant_woman_medium_dark_skin_tone": "1f930-1f3fe", + "pregnant_woman_dark_skin_tone": "1f930-1f3ff", + "breast-feeding_light_skin_tone": "1f931-1f3fb", + "breast-feeding_medium_light_skin_tone": "1f931-1f3fc", + "breast-feeding_medium_skin_tone": "1f931-1f3fd", + "breast-feeding_medium_dark_skin_tone": "1f931-1f3fe", + "breast-feeding_dark_skin_tone": "1f931-1f3ff", + "palms_up_together_light_skin_tone": "1f932-1f3fb", + "palms_up_together_medium_light_skin_tone": "1f932-1f3fc", + "palms_up_together_medium_skin_tone": "1f932-1f3fd", + "palms_up_together_medium_dark_skin_tone": "1f932-1f3fe", + "palms_up_together_dark_skin_tone": "1f932-1f3ff", + "selfie_light_skin_tone": "1f933-1f3fb", + "selfie_medium_light_skin_tone": "1f933-1f3fc", + "selfie_medium_skin_tone": "1f933-1f3fd", + "selfie_medium_dark_skin_tone": "1f933-1f3fe", + "selfie_dark_skin_tone": "1f933-1f3ff", + "prince_light_skin_tone": "1f934-1f3fb", + "prince_medium_light_skin_tone": "1f934-1f3fc", + "prince_medium_skin_tone": "1f934-1f3fd", + "prince_medium_dark_skin_tone": "1f934-1f3fe", + "prince_dark_skin_tone": "1f934-1f3ff", + "woman_in_tuxedo_light_skin_tone": "1f935-1f3fb-200d-2640-fe0f", + "woman_in_tuxedo_medium_light_skin_tone": "1f935-1f3fc-200d-2640-fe0f", + "woman_in_tuxedo_medium_skin_tone": "1f935-1f3fd-200d-2640-fe0f", + "woman_in_tuxedo_medium_dark_skin_tone": "1f935-1f3fe-200d-2640-fe0f", + "woman_in_tuxedo_dark_skin_tone": "1f935-1f3ff-200d-2640-fe0f", + "man_in_tuxedo_light_skin_tone": "1f935-1f3fb-200d-2642-fe0f", + 
"man_in_tuxedo_medium_light_skin_tone": "1f935-1f3fc-200d-2642-fe0f", + "man_in_tuxedo_medium_skin_tone": "1f935-1f3fd-200d-2642-fe0f", + "man_in_tuxedo_medium_dark_skin_tone": "1f935-1f3fe-200d-2642-fe0f", + "man_in_tuxedo_dark_skin_tone": "1f935-1f3ff-200d-2642-fe0f", + "person_in_tuxedo_light_skin_tone": "1f935-1f3fb", + "person_in_tuxedo_medium_light_skin_tone": "1f935-1f3fc", + "person_in_tuxedo_medium_skin_tone": "1f935-1f3fd", + "person_in_tuxedo_medium_dark_skin_tone": "1f935-1f3fe", + "person_in_tuxedo_dark_skin_tone": "1f935-1f3ff", + "mrs_claus_light_skin_tone": "1f936-1f3fb", + "mother_christmas_light_skin_tone": "1f936-1f3fb", + "mrs_claus_medium_light_skin_tone": "1f936-1f3fc", + "mother_christmas_medium_light_skin_tone": "1f936-1f3fc", + "mrs_claus_medium_skin_tone": "1f936-1f3fd", + "mother_christmas_medium_skin_tone": "1f936-1f3fd", + "mrs_claus_medium_dark_skin_tone": "1f936-1f3fe", + "mother_christmas_medium_dark_skin_tone": "1f936-1f3fe", + "mrs_claus_dark_skin_tone": "1f936-1f3ff", + "mother_christmas_dark_skin_tone": "1f936-1f3ff", + "woman-shrugging_light_skin_tone": "1f937-1f3fb-200d-2640-fe0f", + "woman_shrugging_light_skin_tone": "1f937-1f3fb-200d-2640-fe0f", + "woman-shrugging_medium_light_skin_tone": "1f937-1f3fc-200d-2640-fe0f", + "woman_shrugging_medium_light_skin_tone": "1f937-1f3fc-200d-2640-fe0f", + "woman-shrugging_medium_skin_tone": "1f937-1f3fd-200d-2640-fe0f", + "woman_shrugging_medium_skin_tone": "1f937-1f3fd-200d-2640-fe0f", + "woman-shrugging_medium_dark_skin_tone": "1f937-1f3fe-200d-2640-fe0f", + "woman_shrugging_medium_dark_skin_tone": "1f937-1f3fe-200d-2640-fe0f", + "woman-shrugging_dark_skin_tone": "1f937-1f3ff-200d-2640-fe0f", + "woman_shrugging_dark_skin_tone": "1f937-1f3ff-200d-2640-fe0f", + "man-shrugging_light_skin_tone": "1f937-1f3fb-200d-2642-fe0f", + "man_shrugging_light_skin_tone": "1f937-1f3fb-200d-2642-fe0f", + "man-shrugging_medium_light_skin_tone": "1f937-1f3fc-200d-2642-fe0f", + 
"man_shrugging_medium_light_skin_tone": "1f937-1f3fc-200d-2642-fe0f", + "man-shrugging_medium_skin_tone": "1f937-1f3fd-200d-2642-fe0f", + "man_shrugging_medium_skin_tone": "1f937-1f3fd-200d-2642-fe0f", + "man-shrugging_medium_dark_skin_tone": "1f937-1f3fe-200d-2642-fe0f", + "man_shrugging_medium_dark_skin_tone": "1f937-1f3fe-200d-2642-fe0f", + "man-shrugging_dark_skin_tone": "1f937-1f3ff-200d-2642-fe0f", + "man_shrugging_dark_skin_tone": "1f937-1f3ff-200d-2642-fe0f", + "shrug_light_skin_tone": "1f937-1f3fb", + "shrug_medium_light_skin_tone": "1f937-1f3fc", + "shrug_medium_skin_tone": "1f937-1f3fd", + "shrug_medium_dark_skin_tone": "1f937-1f3fe", + "shrug_dark_skin_tone": "1f937-1f3ff", + "woman-cartwheeling_light_skin_tone": "1f938-1f3fb-200d-2640-fe0f", + "woman_cartwheeling_light_skin_tone": "1f938-1f3fb-200d-2640-fe0f", + "woman-cartwheeling_medium_light_skin_tone": "1f938-1f3fc-200d-2640-fe0f", + "woman_cartwheeling_medium_light_skin_tone": "1f938-1f3fc-200d-2640-fe0f", + "woman-cartwheeling_medium_skin_tone": "1f938-1f3fd-200d-2640-fe0f", + "woman_cartwheeling_medium_skin_tone": "1f938-1f3fd-200d-2640-fe0f", + "woman-cartwheeling_medium_dark_skin_tone": "1f938-1f3fe-200d-2640-fe0f", + "woman_cartwheeling_medium_dark_skin_tone": "1f938-1f3fe-200d-2640-fe0f", + "woman-cartwheeling_dark_skin_tone": "1f938-1f3ff-200d-2640-fe0f", + "woman_cartwheeling_dark_skin_tone": "1f938-1f3ff-200d-2640-fe0f", + "man-cartwheeling_light_skin_tone": "1f938-1f3fb-200d-2642-fe0f", + "man_cartwheeling_light_skin_tone": "1f938-1f3fb-200d-2642-fe0f", + "man-cartwheeling_medium_light_skin_tone": "1f938-1f3fc-200d-2642-fe0f", + "man_cartwheeling_medium_light_skin_tone": "1f938-1f3fc-200d-2642-fe0f", + "man-cartwheeling_medium_skin_tone": "1f938-1f3fd-200d-2642-fe0f", + "man_cartwheeling_medium_skin_tone": "1f938-1f3fd-200d-2642-fe0f", + "man-cartwheeling_medium_dark_skin_tone": "1f938-1f3fe-200d-2642-fe0f", + "man_cartwheeling_medium_dark_skin_tone": "1f938-1f3fe-200d-2642-fe0f", + 
"man-cartwheeling_dark_skin_tone": "1f938-1f3ff-200d-2642-fe0f", + "man_cartwheeling_dark_skin_tone": "1f938-1f3ff-200d-2642-fe0f", + "person_doing_cartwheel_light_skin_tone": "1f938-1f3fb", + "person_doing_cartwheel_medium_light_skin_tone": "1f938-1f3fc", + "person_doing_cartwheel_medium_skin_tone": "1f938-1f3fd", + "person_doing_cartwheel_medium_dark_skin_tone": "1f938-1f3fe", + "person_doing_cartwheel_dark_skin_tone": "1f938-1f3ff", + "woman-juggling_light_skin_tone": "1f939-1f3fb-200d-2640-fe0f", + "woman_juggling_light_skin_tone": "1f939-1f3fb-200d-2640-fe0f", + "woman-juggling_medium_light_skin_tone": "1f939-1f3fc-200d-2640-fe0f", + "woman_juggling_medium_light_skin_tone": "1f939-1f3fc-200d-2640-fe0f", + "woman-juggling_medium_skin_tone": "1f939-1f3fd-200d-2640-fe0f", + "woman_juggling_medium_skin_tone": "1f939-1f3fd-200d-2640-fe0f", + "woman-juggling_medium_dark_skin_tone": "1f939-1f3fe-200d-2640-fe0f", + "woman_juggling_medium_dark_skin_tone": "1f939-1f3fe-200d-2640-fe0f", + "woman-juggling_dark_skin_tone": "1f939-1f3ff-200d-2640-fe0f", + "woman_juggling_dark_skin_tone": "1f939-1f3ff-200d-2640-fe0f", + "man-juggling_light_skin_tone": "1f939-1f3fb-200d-2642-fe0f", + "man_juggling_light_skin_tone": "1f939-1f3fb-200d-2642-fe0f", + "man-juggling_medium_light_skin_tone": "1f939-1f3fc-200d-2642-fe0f", + "man_juggling_medium_light_skin_tone": "1f939-1f3fc-200d-2642-fe0f", + "man-juggling_medium_skin_tone": "1f939-1f3fd-200d-2642-fe0f", + "man_juggling_medium_skin_tone": "1f939-1f3fd-200d-2642-fe0f", + "man-juggling_medium_dark_skin_tone": "1f939-1f3fe-200d-2642-fe0f", + "man_juggling_medium_dark_skin_tone": "1f939-1f3fe-200d-2642-fe0f", + "man-juggling_dark_skin_tone": "1f939-1f3ff-200d-2642-fe0f", + "man_juggling_dark_skin_tone": "1f939-1f3ff-200d-2642-fe0f", + "juggling_light_skin_tone": "1f939-1f3fb", + "juggling_medium_light_skin_tone": "1f939-1f3fc", + "juggling_medium_skin_tone": "1f939-1f3fd", + "juggling_medium_dark_skin_tone": "1f939-1f3fe", + 
"juggling_dark_skin_tone": "1f939-1f3ff", + "woman-playing-water-polo_light_skin_tone": "1f93d-1f3fb-200d-2640-fe0f", + "woman_playing_water_polo_light_skin_tone": "1f93d-1f3fb-200d-2640-fe0f", + "woman-playing-water-polo_medium_light_skin_tone": "1f93d-1f3fc-200d-2640-fe0f", + "woman_playing_water_polo_medium_light_skin_tone": "1f93d-1f3fc-200d-2640-fe0f", + "woman-playing-water-polo_medium_skin_tone": "1f93d-1f3fd-200d-2640-fe0f", + "woman_playing_water_polo_medium_skin_tone": "1f93d-1f3fd-200d-2640-fe0f", + "woman-playing-water-polo_medium_dark_skin_tone": "1f93d-1f3fe-200d-2640-fe0f", + "woman_playing_water_polo_medium_dark_skin_tone": "1f93d-1f3fe-200d-2640-fe0f", + "woman-playing-water-polo_dark_skin_tone": "1f93d-1f3ff-200d-2640-fe0f", + "woman_playing_water_polo_dark_skin_tone": "1f93d-1f3ff-200d-2640-fe0f", + "man-playing-water-polo_light_skin_tone": "1f93d-1f3fb-200d-2642-fe0f", + "man_playing_water_polo_light_skin_tone": "1f93d-1f3fb-200d-2642-fe0f", + "man-playing-water-polo_medium_light_skin_tone": "1f93d-1f3fc-200d-2642-fe0f", + "man_playing_water_polo_medium_light_skin_tone": "1f93d-1f3fc-200d-2642-fe0f", + "man-playing-water-polo_medium_skin_tone": "1f93d-1f3fd-200d-2642-fe0f", + "man_playing_water_polo_medium_skin_tone": "1f93d-1f3fd-200d-2642-fe0f", + "man-playing-water-polo_medium_dark_skin_tone": "1f93d-1f3fe-200d-2642-fe0f", + "man_playing_water_polo_medium_dark_skin_tone": "1f93d-1f3fe-200d-2642-fe0f", + "man-playing-water-polo_dark_skin_tone": "1f93d-1f3ff-200d-2642-fe0f", + "man_playing_water_polo_dark_skin_tone": "1f93d-1f3ff-200d-2642-fe0f", + "water_polo_light_skin_tone": "1f93d-1f3fb", + "water_polo_medium_light_skin_tone": "1f93d-1f3fc", + "water_polo_medium_skin_tone": "1f93d-1f3fd", + "water_polo_medium_dark_skin_tone": "1f93d-1f3fe", + "water_polo_dark_skin_tone": "1f93d-1f3ff", + "woman-playing-handball_light_skin_tone": "1f93e-1f3fb-200d-2640-fe0f", + "woman_playing_handball_light_skin_tone": "1f93e-1f3fb-200d-2640-fe0f", + 
"woman-playing-handball_medium_light_skin_tone": "1f93e-1f3fc-200d-2640-fe0f", + "woman_playing_handball_medium_light_skin_tone": "1f93e-1f3fc-200d-2640-fe0f", + "woman-playing-handball_medium_skin_tone": "1f93e-1f3fd-200d-2640-fe0f", + "woman_playing_handball_medium_skin_tone": "1f93e-1f3fd-200d-2640-fe0f", + "woman-playing-handball_medium_dark_skin_tone": "1f93e-1f3fe-200d-2640-fe0f", + "woman_playing_handball_medium_dark_skin_tone": "1f93e-1f3fe-200d-2640-fe0f", + "woman-playing-handball_dark_skin_tone": "1f93e-1f3ff-200d-2640-fe0f", + "woman_playing_handball_dark_skin_tone": "1f93e-1f3ff-200d-2640-fe0f", + "man-playing-handball_light_skin_tone": "1f93e-1f3fb-200d-2642-fe0f", + "man_playing_handball_light_skin_tone": "1f93e-1f3fb-200d-2642-fe0f", + "man-playing-handball_medium_light_skin_tone": "1f93e-1f3fc-200d-2642-fe0f", + "man_playing_handball_medium_light_skin_tone": "1f93e-1f3fc-200d-2642-fe0f", + "man-playing-handball_medium_skin_tone": "1f93e-1f3fd-200d-2642-fe0f", + "man_playing_handball_medium_skin_tone": "1f93e-1f3fd-200d-2642-fe0f", + "man-playing-handball_medium_dark_skin_tone": "1f93e-1f3fe-200d-2642-fe0f", + "man_playing_handball_medium_dark_skin_tone": "1f93e-1f3fe-200d-2642-fe0f", + "man-playing-handball_dark_skin_tone": "1f93e-1f3ff-200d-2642-fe0f", + "man_playing_handball_dark_skin_tone": "1f93e-1f3ff-200d-2642-fe0f", + "handball_light_skin_tone": "1f93e-1f3fb", + "handball_medium_light_skin_tone": "1f93e-1f3fc", + "handball_medium_skin_tone": "1f93e-1f3fd", + "handball_medium_dark_skin_tone": "1f93e-1f3fe", + "handball_dark_skin_tone": "1f93e-1f3ff", + "ninja_light_skin_tone": "1f977-1f3fb", + "ninja_medium_light_skin_tone": "1f977-1f3fc", + "ninja_medium_skin_tone": "1f977-1f3fd", + "ninja_medium_dark_skin_tone": "1f977-1f3fe", + "ninja_dark_skin_tone": "1f977-1f3ff", + "leg_light_skin_tone": "1f9b5-1f3fb", + "leg_medium_light_skin_tone": "1f9b5-1f3fc", + "leg_medium_skin_tone": "1f9b5-1f3fd", + "leg_medium_dark_skin_tone": "1f9b5-1f3fe", + 
"leg_dark_skin_tone": "1f9b5-1f3ff", + "foot_light_skin_tone": "1f9b6-1f3fb", + "foot_medium_light_skin_tone": "1f9b6-1f3fc", + "foot_medium_skin_tone": "1f9b6-1f3fd", + "foot_medium_dark_skin_tone": "1f9b6-1f3fe", + "foot_dark_skin_tone": "1f9b6-1f3ff", + "female_superhero_light_skin_tone": "1f9b8-1f3fb-200d-2640-fe0f", + "female_superhero_medium_light_skin_tone": "1f9b8-1f3fc-200d-2640-fe0f", + "female_superhero_medium_skin_tone": "1f9b8-1f3fd-200d-2640-fe0f", + "female_superhero_medium_dark_skin_tone": "1f9b8-1f3fe-200d-2640-fe0f", + "female_superhero_dark_skin_tone": "1f9b8-1f3ff-200d-2640-fe0f", + "male_superhero_light_skin_tone": "1f9b8-1f3fb-200d-2642-fe0f", + "male_superhero_medium_light_skin_tone": "1f9b8-1f3fc-200d-2642-fe0f", + "male_superhero_medium_skin_tone": "1f9b8-1f3fd-200d-2642-fe0f", + "male_superhero_medium_dark_skin_tone": "1f9b8-1f3fe-200d-2642-fe0f", + "male_superhero_dark_skin_tone": "1f9b8-1f3ff-200d-2642-fe0f", + "superhero_light_skin_tone": "1f9b8-1f3fb", + "superhero_medium_light_skin_tone": "1f9b8-1f3fc", + "superhero_medium_skin_tone": "1f9b8-1f3fd", + "superhero_medium_dark_skin_tone": "1f9b8-1f3fe", + "superhero_dark_skin_tone": "1f9b8-1f3ff", + "female_supervillain_light_skin_tone": "1f9b9-1f3fb-200d-2640-fe0f", + "female_supervillain_medium_light_skin_tone": "1f9b9-1f3fc-200d-2640-fe0f", + "female_supervillain_medium_skin_tone": "1f9b9-1f3fd-200d-2640-fe0f", + "female_supervillain_medium_dark_skin_tone": "1f9b9-1f3fe-200d-2640-fe0f", + "female_supervillain_dark_skin_tone": "1f9b9-1f3ff-200d-2640-fe0f", + "male_supervillain_light_skin_tone": "1f9b9-1f3fb-200d-2642-fe0f", + "male_supervillain_medium_light_skin_tone": "1f9b9-1f3fc-200d-2642-fe0f", + "male_supervillain_medium_skin_tone": "1f9b9-1f3fd-200d-2642-fe0f", + "male_supervillain_medium_dark_skin_tone": "1f9b9-1f3fe-200d-2642-fe0f", + "male_supervillain_dark_skin_tone": "1f9b9-1f3ff-200d-2642-fe0f", + "supervillain_light_skin_tone": "1f9b9-1f3fb", + 
"supervillain_medium_light_skin_tone": "1f9b9-1f3fc", + "supervillain_medium_skin_tone": "1f9b9-1f3fd", + "supervillain_medium_dark_skin_tone": "1f9b9-1f3fe", + "supervillain_dark_skin_tone": "1f9b9-1f3ff", + "ear_with_hearing_aid_light_skin_tone": "1f9bb-1f3fb", + "ear_with_hearing_aid_medium_light_skin_tone": "1f9bb-1f3fc", + "ear_with_hearing_aid_medium_skin_tone": "1f9bb-1f3fd", + "ear_with_hearing_aid_medium_dark_skin_tone": "1f9bb-1f3fe", + "ear_with_hearing_aid_dark_skin_tone": "1f9bb-1f3ff", + "woman_standing_light_skin_tone": "1f9cd-1f3fb-200d-2640-fe0f", + "woman_standing_medium_light_skin_tone": "1f9cd-1f3fc-200d-2640-fe0f", + "woman_standing_medium_skin_tone": "1f9cd-1f3fd-200d-2640-fe0f", + "woman_standing_medium_dark_skin_tone": "1f9cd-1f3fe-200d-2640-fe0f", + "woman_standing_dark_skin_tone": "1f9cd-1f3ff-200d-2640-fe0f", + "man_standing_light_skin_tone": "1f9cd-1f3fb-200d-2642-fe0f", + "man_standing_medium_light_skin_tone": "1f9cd-1f3fc-200d-2642-fe0f", + "man_standing_medium_skin_tone": "1f9cd-1f3fd-200d-2642-fe0f", + "man_standing_medium_dark_skin_tone": "1f9cd-1f3fe-200d-2642-fe0f", + "man_standing_dark_skin_tone": "1f9cd-1f3ff-200d-2642-fe0f", + "standing_person_light_skin_tone": "1f9cd-1f3fb", + "standing_person_medium_light_skin_tone": "1f9cd-1f3fc", + "standing_person_medium_skin_tone": "1f9cd-1f3fd", + "standing_person_medium_dark_skin_tone": "1f9cd-1f3fe", + "standing_person_dark_skin_tone": "1f9cd-1f3ff", + "woman_kneeling_light_skin_tone": "1f9ce-1f3fb-200d-2640-fe0f", + "woman_kneeling_medium_light_skin_tone": "1f9ce-1f3fc-200d-2640-fe0f", + "woman_kneeling_medium_skin_tone": "1f9ce-1f3fd-200d-2640-fe0f", + "woman_kneeling_medium_dark_skin_tone": "1f9ce-1f3fe-200d-2640-fe0f", + "woman_kneeling_dark_skin_tone": "1f9ce-1f3ff-200d-2640-fe0f", + "man_kneeling_light_skin_tone": "1f9ce-1f3fb-200d-2642-fe0f", + "man_kneeling_medium_light_skin_tone": "1f9ce-1f3fc-200d-2642-fe0f", + "man_kneeling_medium_skin_tone": "1f9ce-1f3fd-200d-2642-fe0f", + 
"man_kneeling_medium_dark_skin_tone": "1f9ce-1f3fe-200d-2642-fe0f", + "man_kneeling_dark_skin_tone": "1f9ce-1f3ff-200d-2642-fe0f", + "kneeling_person_light_skin_tone": "1f9ce-1f3fb", + "kneeling_person_medium_light_skin_tone": "1f9ce-1f3fc", + "kneeling_person_medium_skin_tone": "1f9ce-1f3fd", + "kneeling_person_medium_dark_skin_tone": "1f9ce-1f3fe", + "kneeling_person_dark_skin_tone": "1f9ce-1f3ff", + "deaf_woman_light_skin_tone": "1f9cf-1f3fb-200d-2640-fe0f", + "deaf_woman_medium_light_skin_tone": "1f9cf-1f3fc-200d-2640-fe0f", + "deaf_woman_medium_skin_tone": "1f9cf-1f3fd-200d-2640-fe0f", + "deaf_woman_medium_dark_skin_tone": "1f9cf-1f3fe-200d-2640-fe0f", + "deaf_woman_dark_skin_tone": "1f9cf-1f3ff-200d-2640-fe0f", + "deaf_man_light_skin_tone": "1f9cf-1f3fb-200d-2642-fe0f", + "deaf_man_medium_light_skin_tone": "1f9cf-1f3fc-200d-2642-fe0f", + "deaf_man_medium_skin_tone": "1f9cf-1f3fd-200d-2642-fe0f", + "deaf_man_medium_dark_skin_tone": "1f9cf-1f3fe-200d-2642-fe0f", + "deaf_man_dark_skin_tone": "1f9cf-1f3ff-200d-2642-fe0f", + "deaf_person_light_skin_tone": "1f9cf-1f3fb", + "deaf_person_medium_light_skin_tone": "1f9cf-1f3fc", + "deaf_person_medium_skin_tone": "1f9cf-1f3fd", + "deaf_person_medium_dark_skin_tone": "1f9cf-1f3fe", + "deaf_person_dark_skin_tone": "1f9cf-1f3ff", + "farmer_light_skin_tone": "1f9d1-1f3fb-200d-1f33e", + "farmer_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f33e", + "farmer_medium_skin_tone": "1f9d1-1f3fd-200d-1f33e", + "farmer_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f33e", + "farmer_dark_skin_tone": "1f9d1-1f3ff-200d-1f33e", + "cook_light_skin_tone": "1f9d1-1f3fb-200d-1f373", + "cook_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f373", + "cook_medium_skin_tone": "1f9d1-1f3fd-200d-1f373", + "cook_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f373", + "cook_dark_skin_tone": "1f9d1-1f3ff-200d-1f373", + "person_feeding_baby_light_skin_tone": "1f9d1-1f3fb-200d-1f37c", + "person_feeding_baby_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f37c", + 
"person_feeding_baby_medium_skin_tone": "1f9d1-1f3fd-200d-1f37c", + "person_feeding_baby_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f37c", + "person_feeding_baby_dark_skin_tone": "1f9d1-1f3ff-200d-1f37c", + "mx_claus_light_skin_tone": "1f9d1-1f3fb-200d-1f384", + "mx_claus_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f384", + "mx_claus_medium_skin_tone": "1f9d1-1f3fd-200d-1f384", + "mx_claus_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f384", + "mx_claus_dark_skin_tone": "1f9d1-1f3ff-200d-1f384", + "student_light_skin_tone": "1f9d1-1f3fb-200d-1f393", + "student_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f393", + "student_medium_skin_tone": "1f9d1-1f3fd-200d-1f393", + "student_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f393", + "student_dark_skin_tone": "1f9d1-1f3ff-200d-1f393", + "singer_light_skin_tone": "1f9d1-1f3fb-200d-1f3a4", + "singer_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f3a4", + "singer_medium_skin_tone": "1f9d1-1f3fd-200d-1f3a4", + "singer_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f3a4", + "singer_dark_skin_tone": "1f9d1-1f3ff-200d-1f3a4", + "artist_light_skin_tone": "1f9d1-1f3fb-200d-1f3a8", + "artist_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f3a8", + "artist_medium_skin_tone": "1f9d1-1f3fd-200d-1f3a8", + "artist_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f3a8", + "artist_dark_skin_tone": "1f9d1-1f3ff-200d-1f3a8", + "teacher_light_skin_tone": "1f9d1-1f3fb-200d-1f3eb", + "teacher_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f3eb", + "teacher_medium_skin_tone": "1f9d1-1f3fd-200d-1f3eb", + "teacher_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f3eb", + "teacher_dark_skin_tone": "1f9d1-1f3ff-200d-1f3eb", + "factory_worker_light_skin_tone": "1f9d1-1f3fb-200d-1f3ed", + "factory_worker_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f3ed", + "factory_worker_medium_skin_tone": "1f9d1-1f3fd-200d-1f3ed", + "factory_worker_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f3ed", + "factory_worker_dark_skin_tone": "1f9d1-1f3ff-200d-1f3ed", + "technologist_light_skin_tone": 
"1f9d1-1f3fb-200d-1f4bb", + "technologist_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f4bb", + "technologist_medium_skin_tone": "1f9d1-1f3fd-200d-1f4bb", + "technologist_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f4bb", + "technologist_dark_skin_tone": "1f9d1-1f3ff-200d-1f4bb", + "office_worker_light_skin_tone": "1f9d1-1f3fb-200d-1f4bc", + "office_worker_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f4bc", + "office_worker_medium_skin_tone": "1f9d1-1f3fd-200d-1f4bc", + "office_worker_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f4bc", + "office_worker_dark_skin_tone": "1f9d1-1f3ff-200d-1f4bc", + "mechanic_light_skin_tone": "1f9d1-1f3fb-200d-1f527", + "mechanic_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f527", + "mechanic_medium_skin_tone": "1f9d1-1f3fd-200d-1f527", + "mechanic_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f527", + "mechanic_dark_skin_tone": "1f9d1-1f3ff-200d-1f527", + "scientist_light_skin_tone": "1f9d1-1f3fb-200d-1f52c", + "scientist_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f52c", + "scientist_medium_skin_tone": "1f9d1-1f3fd-200d-1f52c", + "scientist_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f52c", + "scientist_dark_skin_tone": "1f9d1-1f3ff-200d-1f52c", + "astronaut_light_skin_tone": "1f9d1-1f3fb-200d-1f680", + "astronaut_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f680", + "astronaut_medium_skin_tone": "1f9d1-1f3fd-200d-1f680", + "astronaut_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f680", + "astronaut_dark_skin_tone": "1f9d1-1f3ff-200d-1f680", + "firefighter_light_skin_tone": "1f9d1-1f3fb-200d-1f692", + "firefighter_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f692", + "firefighter_medium_skin_tone": "1f9d1-1f3fd-200d-1f692", + "firefighter_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f692", + "firefighter_dark_skin_tone": "1f9d1-1f3ff-200d-1f692", + "people_holding_hands_light_skin_tone_light_skin_tone": "1f9d1-1f3fb-200d-1f91d-200d-1f9d1-1f3fb", + "people_holding_hands_light_skin_tone_medium_light_skin_tone": 
"1f9d1-1f3fb-200d-1f91d-200d-1f9d1-1f3fc", + "people_holding_hands_light_skin_tone_medium_skin_tone": "1f9d1-1f3fb-200d-1f91d-200d-1f9d1-1f3fd", + "people_holding_hands_light_skin_tone_medium_dark_skin_tone": "1f9d1-1f3fb-200d-1f91d-200d-1f9d1-1f3fe", + "people_holding_hands_light_skin_tone_dark_skin_tone": "1f9d1-1f3fb-200d-1f91d-200d-1f9d1-1f3ff", + "people_holding_hands_medium_light_skin_tone_light_skin_tone": "1f9d1-1f3fc-200d-1f91d-200d-1f9d1-1f3fb", + "people_holding_hands_medium_light_skin_tone_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f91d-200d-1f9d1-1f3fc", + "people_holding_hands_medium_light_skin_tone_medium_skin_tone": "1f9d1-1f3fc-200d-1f91d-200d-1f9d1-1f3fd", + "people_holding_hands_medium_light_skin_tone_medium_dark_skin_tone": "1f9d1-1f3fc-200d-1f91d-200d-1f9d1-1f3fe", + "people_holding_hands_medium_light_skin_tone_dark_skin_tone": "1f9d1-1f3fc-200d-1f91d-200d-1f9d1-1f3ff", + "people_holding_hands_medium_skin_tone_light_skin_tone": "1f9d1-1f3fd-200d-1f91d-200d-1f9d1-1f3fb", + "people_holding_hands_medium_skin_tone_medium_light_skin_tone": "1f9d1-1f3fd-200d-1f91d-200d-1f9d1-1f3fc", + "people_holding_hands_medium_skin_tone_medium_skin_tone": "1f9d1-1f3fd-200d-1f91d-200d-1f9d1-1f3fd", + "people_holding_hands_medium_skin_tone_medium_dark_skin_tone": "1f9d1-1f3fd-200d-1f91d-200d-1f9d1-1f3fe", + "people_holding_hands_medium_skin_tone_dark_skin_tone": "1f9d1-1f3fd-200d-1f91d-200d-1f9d1-1f3ff", + "people_holding_hands_medium_dark_skin_tone_light_skin_tone": "1f9d1-1f3fe-200d-1f91d-200d-1f9d1-1f3fb", + "people_holding_hands_medium_dark_skin_tone_medium_light_skin_tone": "1f9d1-1f3fe-200d-1f91d-200d-1f9d1-1f3fc", + "people_holding_hands_medium_dark_skin_tone_medium_skin_tone": "1f9d1-1f3fe-200d-1f91d-200d-1f9d1-1f3fd", + "people_holding_hands_medium_dark_skin_tone_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f91d-200d-1f9d1-1f3fe", + "people_holding_hands_medium_dark_skin_tone_dark_skin_tone": "1f9d1-1f3fe-200d-1f91d-200d-1f9d1-1f3ff", + 
"people_holding_hands_dark_skin_tone_light_skin_tone": "1f9d1-1f3ff-200d-1f91d-200d-1f9d1-1f3fb", + "people_holding_hands_dark_skin_tone_medium_light_skin_tone": "1f9d1-1f3ff-200d-1f91d-200d-1f9d1-1f3fc", + "people_holding_hands_dark_skin_tone_medium_skin_tone": "1f9d1-1f3ff-200d-1f91d-200d-1f9d1-1f3fd", + "people_holding_hands_dark_skin_tone_medium_dark_skin_tone": "1f9d1-1f3ff-200d-1f91d-200d-1f9d1-1f3fe", + "people_holding_hands_dark_skin_tone_dark_skin_tone": "1f9d1-1f3ff-200d-1f91d-200d-1f9d1-1f3ff", + "person_with_probing_cane_light_skin_tone": "1f9d1-1f3fb-200d-1f9af", + "person_with_probing_cane_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f9af", + "person_with_probing_cane_medium_skin_tone": "1f9d1-1f3fd-200d-1f9af", + "person_with_probing_cane_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f9af", + "person_with_probing_cane_dark_skin_tone": "1f9d1-1f3ff-200d-1f9af", + "red_haired_person_light_skin_tone": "1f9d1-1f3fb-200d-1f9b0", + "red_haired_person_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f9b0", + "red_haired_person_medium_skin_tone": "1f9d1-1f3fd-200d-1f9b0", + "red_haired_person_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f9b0", + "red_haired_person_dark_skin_tone": "1f9d1-1f3ff-200d-1f9b0", + "curly_haired_person_light_skin_tone": "1f9d1-1f3fb-200d-1f9b1", + "curly_haired_person_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f9b1", + "curly_haired_person_medium_skin_tone": "1f9d1-1f3fd-200d-1f9b1", + "curly_haired_person_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f9b1", + "curly_haired_person_dark_skin_tone": "1f9d1-1f3ff-200d-1f9b1", + "bald_person_light_skin_tone": "1f9d1-1f3fb-200d-1f9b2", + "bald_person_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f9b2", + "bald_person_medium_skin_tone": "1f9d1-1f3fd-200d-1f9b2", + "bald_person_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f9b2", + "bald_person_dark_skin_tone": "1f9d1-1f3ff-200d-1f9b2", + "white_haired_person_light_skin_tone": "1f9d1-1f3fb-200d-1f9b3", + "white_haired_person_medium_light_skin_tone": 
"1f9d1-1f3fc-200d-1f9b3", + "white_haired_person_medium_skin_tone": "1f9d1-1f3fd-200d-1f9b3", + "white_haired_person_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f9b3", + "white_haired_person_dark_skin_tone": "1f9d1-1f3ff-200d-1f9b3", + "person_in_motorized_wheelchair_light_skin_tone": "1f9d1-1f3fb-200d-1f9bc", + "person_in_motorized_wheelchair_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f9bc", + "person_in_motorized_wheelchair_medium_skin_tone": "1f9d1-1f3fd-200d-1f9bc", + "person_in_motorized_wheelchair_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f9bc", + "person_in_motorized_wheelchair_dark_skin_tone": "1f9d1-1f3ff-200d-1f9bc", + "person_in_manual_wheelchair_light_skin_tone": "1f9d1-1f3fb-200d-1f9bd", + "person_in_manual_wheelchair_medium_light_skin_tone": "1f9d1-1f3fc-200d-1f9bd", + "person_in_manual_wheelchair_medium_skin_tone": "1f9d1-1f3fd-200d-1f9bd", + "person_in_manual_wheelchair_medium_dark_skin_tone": "1f9d1-1f3fe-200d-1f9bd", + "person_in_manual_wheelchair_dark_skin_tone": "1f9d1-1f3ff-200d-1f9bd", + "health_worker_light_skin_tone": "1f9d1-1f3fb-200d-2695-fe0f", + "doctor_light_skin_tone": "1f9d1-1f3fb-200d-2695-fe0f", + "health_worker_medium_light_skin_tone": "1f9d1-1f3fc-200d-2695-fe0f", + "doctor_medium_light_skin_tone": "1f9d1-1f3fc-200d-2695-fe0f", + "health_worker_medium_skin_tone": "1f9d1-1f3fd-200d-2695-fe0f", + "doctor_medium_skin_tone": "1f9d1-1f3fd-200d-2695-fe0f", + "health_worker_medium_dark_skin_tone": "1f9d1-1f3fe-200d-2695-fe0f", + "doctor_medium_dark_skin_tone": "1f9d1-1f3fe-200d-2695-fe0f", + "health_worker_dark_skin_tone": "1f9d1-1f3ff-200d-2695-fe0f", + "doctor_dark_skin_tone": "1f9d1-1f3ff-200d-2695-fe0f", + "judge_light_skin_tone": "1f9d1-1f3fb-200d-2696-fe0f", + "judge_medium_light_skin_tone": "1f9d1-1f3fc-200d-2696-fe0f", + "judge_medium_skin_tone": "1f9d1-1f3fd-200d-2696-fe0f", + "judge_medium_dark_skin_tone": "1f9d1-1f3fe-200d-2696-fe0f", + "judge_dark_skin_tone": "1f9d1-1f3ff-200d-2696-fe0f", + "pilot_light_skin_tone": 
"1f9d1-1f3fb-200d-2708-fe0f", + "pilot_medium_light_skin_tone": "1f9d1-1f3fc-200d-2708-fe0f", + "pilot_medium_skin_tone": "1f9d1-1f3fd-200d-2708-fe0f", + "pilot_medium_dark_skin_tone": "1f9d1-1f3fe-200d-2708-fe0f", + "pilot_dark_skin_tone": "1f9d1-1f3ff-200d-2708-fe0f", + "adult_light_skin_tone": "1f9d1-1f3fb", + "adult_medium_light_skin_tone": "1f9d1-1f3fc", + "adult_medium_skin_tone": "1f9d1-1f3fd", + "adult_medium_dark_skin_tone": "1f9d1-1f3fe", + "adult_dark_skin_tone": "1f9d1-1f3ff", + "child_light_skin_tone": "1f9d2-1f3fb", + "child_medium_light_skin_tone": "1f9d2-1f3fc", + "child_medium_skin_tone": "1f9d2-1f3fd", + "child_medium_dark_skin_tone": "1f9d2-1f3fe", + "child_dark_skin_tone": "1f9d2-1f3ff", + "older_adult_light_skin_tone": "1f9d3-1f3fb", + "older_adult_medium_light_skin_tone": "1f9d3-1f3fc", + "older_adult_medium_skin_tone": "1f9d3-1f3fd", + "older_adult_medium_dark_skin_tone": "1f9d3-1f3fe", + "older_adult_dark_skin_tone": "1f9d3-1f3ff", + "bearded_person_light_skin_tone": "1f9d4-1f3fb", + "bearded_person_medium_light_skin_tone": "1f9d4-1f3fc", + "bearded_person_medium_skin_tone": "1f9d4-1f3fd", + "bearded_person_medium_dark_skin_tone": "1f9d4-1f3fe", + "bearded_person_dark_skin_tone": "1f9d4-1f3ff", + "person_with_headscarf_light_skin_tone": "1f9d5-1f3fb", + "person_with_headscarf_medium_light_skin_tone": "1f9d5-1f3fc", + "person_with_headscarf_medium_skin_tone": "1f9d5-1f3fd", + "person_with_headscarf_medium_dark_skin_tone": "1f9d5-1f3fe", + "person_with_headscarf_dark_skin_tone": "1f9d5-1f3ff", + "woman_in_steamy_room_light_skin_tone": "1f9d6-1f3fb-200d-2640-fe0f", + "woman_in_steamy_room_medium_light_skin_tone": "1f9d6-1f3fc-200d-2640-fe0f", + "woman_in_steamy_room_medium_skin_tone": "1f9d6-1f3fd-200d-2640-fe0f", + "woman_in_steamy_room_medium_dark_skin_tone": "1f9d6-1f3fe-200d-2640-fe0f", + "woman_in_steamy_room_dark_skin_tone": "1f9d6-1f3ff-200d-2640-fe0f", + "man_in_steamy_room_light_skin_tone": "1f9d6-1f3fb-200d-2642-fe0f", + 
"man_in_steamy_room_medium_light_skin_tone": "1f9d6-1f3fc-200d-2642-fe0f", + "man_in_steamy_room_medium_skin_tone": "1f9d6-1f3fd-200d-2642-fe0f", + "man_in_steamy_room_medium_dark_skin_tone": "1f9d6-1f3fe-200d-2642-fe0f", + "man_in_steamy_room_dark_skin_tone": "1f9d6-1f3ff-200d-2642-fe0f", + "person_in_steamy_room_light_skin_tone": "1f9d6-1f3fb", + "person_in_steamy_room_medium_light_skin_tone": "1f9d6-1f3fc", + "person_in_steamy_room_medium_skin_tone": "1f9d6-1f3fd", + "person_in_steamy_room_medium_dark_skin_tone": "1f9d6-1f3fe", + "person_in_steamy_room_dark_skin_tone": "1f9d6-1f3ff", + "woman_climbing_light_skin_tone": "1f9d7-1f3fb-200d-2640-fe0f", + "woman_climbing_medium_light_skin_tone": "1f9d7-1f3fc-200d-2640-fe0f", + "woman_climbing_medium_skin_tone": "1f9d7-1f3fd-200d-2640-fe0f", + "woman_climbing_medium_dark_skin_tone": "1f9d7-1f3fe-200d-2640-fe0f", + "woman_climbing_dark_skin_tone": "1f9d7-1f3ff-200d-2640-fe0f", + "man_climbing_light_skin_tone": "1f9d7-1f3fb-200d-2642-fe0f", + "man_climbing_medium_light_skin_tone": "1f9d7-1f3fc-200d-2642-fe0f", + "man_climbing_medium_skin_tone": "1f9d7-1f3fd-200d-2642-fe0f", + "man_climbing_medium_dark_skin_tone": "1f9d7-1f3fe-200d-2642-fe0f", + "man_climbing_dark_skin_tone": "1f9d7-1f3ff-200d-2642-fe0f", + "person_climbing_light_skin_tone": "1f9d7-1f3fb", + "person_climbing_medium_light_skin_tone": "1f9d7-1f3fc", + "person_climbing_medium_skin_tone": "1f9d7-1f3fd", + "person_climbing_medium_dark_skin_tone": "1f9d7-1f3fe", + "person_climbing_dark_skin_tone": "1f9d7-1f3ff", + "woman_in_lotus_position_light_skin_tone": "1f9d8-1f3fb-200d-2640-fe0f", + "woman_in_lotus_position_medium_light_skin_tone": "1f9d8-1f3fc-200d-2640-fe0f", + "woman_in_lotus_position_medium_skin_tone": "1f9d8-1f3fd-200d-2640-fe0f", + "woman_in_lotus_position_medium_dark_skin_tone": "1f9d8-1f3fe-200d-2640-fe0f", + "woman_in_lotus_position_dark_skin_tone": "1f9d8-1f3ff-200d-2640-fe0f", + "man_in_lotus_position_light_skin_tone": 
"1f9d8-1f3fb-200d-2642-fe0f", + "man_in_lotus_position_medium_light_skin_tone": "1f9d8-1f3fc-200d-2642-fe0f", + "man_in_lotus_position_medium_skin_tone": "1f9d8-1f3fd-200d-2642-fe0f", + "man_in_lotus_position_medium_dark_skin_tone": "1f9d8-1f3fe-200d-2642-fe0f", + "man_in_lotus_position_dark_skin_tone": "1f9d8-1f3ff-200d-2642-fe0f", + "person_in_lotus_position_light_skin_tone": "1f9d8-1f3fb", + "person_in_lotus_position_medium_light_skin_tone": "1f9d8-1f3fc", + "person_in_lotus_position_medium_skin_tone": "1f9d8-1f3fd", + "person_in_lotus_position_medium_dark_skin_tone": "1f9d8-1f3fe", + "person_in_lotus_position_dark_skin_tone": "1f9d8-1f3ff", + "female_mage_light_skin_tone": "1f9d9-1f3fb-200d-2640-fe0f", + "female_mage_medium_light_skin_tone": "1f9d9-1f3fc-200d-2640-fe0f", + "female_mage_medium_skin_tone": "1f9d9-1f3fd-200d-2640-fe0f", + "female_mage_medium_dark_skin_tone": "1f9d9-1f3fe-200d-2640-fe0f", + "female_mage_dark_skin_tone": "1f9d9-1f3ff-200d-2640-fe0f", + "male_mage_light_skin_tone": "1f9d9-1f3fb-200d-2642-fe0f", + "male_mage_medium_light_skin_tone": "1f9d9-1f3fc-200d-2642-fe0f", + "male_mage_medium_skin_tone": "1f9d9-1f3fd-200d-2642-fe0f", + "male_mage_medium_dark_skin_tone": "1f9d9-1f3fe-200d-2642-fe0f", + "male_mage_dark_skin_tone": "1f9d9-1f3ff-200d-2642-fe0f", + "mage_light_skin_tone": "1f9d9-1f3fb", + "mage_medium_light_skin_tone": "1f9d9-1f3fc", + "mage_medium_skin_tone": "1f9d9-1f3fd", + "mage_medium_dark_skin_tone": "1f9d9-1f3fe", + "mage_dark_skin_tone": "1f9d9-1f3ff", + "female_fairy_light_skin_tone": "1f9da-1f3fb-200d-2640-fe0f", + "female_fairy_medium_light_skin_tone": "1f9da-1f3fc-200d-2640-fe0f", + "female_fairy_medium_skin_tone": "1f9da-1f3fd-200d-2640-fe0f", + "female_fairy_medium_dark_skin_tone": "1f9da-1f3fe-200d-2640-fe0f", + "female_fairy_dark_skin_tone": "1f9da-1f3ff-200d-2640-fe0f", + "male_fairy_light_skin_tone": "1f9da-1f3fb-200d-2642-fe0f", + "male_fairy_medium_light_skin_tone": "1f9da-1f3fc-200d-2642-fe0f", + 
"male_fairy_medium_skin_tone": "1f9da-1f3fd-200d-2642-fe0f", + "male_fairy_medium_dark_skin_tone": "1f9da-1f3fe-200d-2642-fe0f", + "male_fairy_dark_skin_tone": "1f9da-1f3ff-200d-2642-fe0f", + "fairy_light_skin_tone": "1f9da-1f3fb", + "fairy_medium_light_skin_tone": "1f9da-1f3fc", + "fairy_medium_skin_tone": "1f9da-1f3fd", + "fairy_medium_dark_skin_tone": "1f9da-1f3fe", + "fairy_dark_skin_tone": "1f9da-1f3ff", + "female_vampire_light_skin_tone": "1f9db-1f3fb-200d-2640-fe0f", + "female_vampire_medium_light_skin_tone": "1f9db-1f3fc-200d-2640-fe0f", + "female_vampire_medium_skin_tone": "1f9db-1f3fd-200d-2640-fe0f", + "female_vampire_medium_dark_skin_tone": "1f9db-1f3fe-200d-2640-fe0f", + "female_vampire_dark_skin_tone": "1f9db-1f3ff-200d-2640-fe0f", + "male_vampire_light_skin_tone": "1f9db-1f3fb-200d-2642-fe0f", + "male_vampire_medium_light_skin_tone": "1f9db-1f3fc-200d-2642-fe0f", + "male_vampire_medium_skin_tone": "1f9db-1f3fd-200d-2642-fe0f", + "male_vampire_medium_dark_skin_tone": "1f9db-1f3fe-200d-2642-fe0f", + "male_vampire_dark_skin_tone": "1f9db-1f3ff-200d-2642-fe0f", + "vampire_light_skin_tone": "1f9db-1f3fb", + "vampire_medium_light_skin_tone": "1f9db-1f3fc", + "vampire_medium_skin_tone": "1f9db-1f3fd", + "vampire_medium_dark_skin_tone": "1f9db-1f3fe", + "vampire_dark_skin_tone": "1f9db-1f3ff", + "mermaid_light_skin_tone": "1f9dc-1f3fb-200d-2640-fe0f", + "mermaid_medium_light_skin_tone": "1f9dc-1f3fc-200d-2640-fe0f", + "mermaid_medium_skin_tone": "1f9dc-1f3fd-200d-2640-fe0f", + "mermaid_medium_dark_skin_tone": "1f9dc-1f3fe-200d-2640-fe0f", + "mermaid_dark_skin_tone": "1f9dc-1f3ff-200d-2640-fe0f", + "merman_light_skin_tone": "1f9dc-1f3fb-200d-2642-fe0f", + "merman_medium_light_skin_tone": "1f9dc-1f3fc-200d-2642-fe0f", + "merman_medium_skin_tone": "1f9dc-1f3fd-200d-2642-fe0f", + "merman_medium_dark_skin_tone": "1f9dc-1f3fe-200d-2642-fe0f", + "merman_dark_skin_tone": "1f9dc-1f3ff-200d-2642-fe0f", + "merperson_light_skin_tone": "1f9dc-1f3fb", + 
"merperson_medium_light_skin_tone": "1f9dc-1f3fc", + "merperson_medium_skin_tone": "1f9dc-1f3fd", + "merperson_medium_dark_skin_tone": "1f9dc-1f3fe", + "merperson_dark_skin_tone": "1f9dc-1f3ff", + "female_elf_light_skin_tone": "1f9dd-1f3fb-200d-2640-fe0f", + "female_elf_medium_light_skin_tone": "1f9dd-1f3fc-200d-2640-fe0f", + "female_elf_medium_skin_tone": "1f9dd-1f3fd-200d-2640-fe0f", + "female_elf_medium_dark_skin_tone": "1f9dd-1f3fe-200d-2640-fe0f", + "female_elf_dark_skin_tone": "1f9dd-1f3ff-200d-2640-fe0f", + "male_elf_light_skin_tone": "1f9dd-1f3fb-200d-2642-fe0f", + "male_elf_medium_light_skin_tone": "1f9dd-1f3fc-200d-2642-fe0f", + "male_elf_medium_skin_tone": "1f9dd-1f3fd-200d-2642-fe0f", + "male_elf_medium_dark_skin_tone": "1f9dd-1f3fe-200d-2642-fe0f", + "male_elf_dark_skin_tone": "1f9dd-1f3ff-200d-2642-fe0f", + "elf_light_skin_tone": "1f9dd-1f3fb", + "elf_medium_light_skin_tone": "1f9dd-1f3fc", + "elf_medium_skin_tone": "1f9dd-1f3fd", + "elf_medium_dark_skin_tone": "1f9dd-1f3fe", + "elf_dark_skin_tone": "1f9dd-1f3ff", + "point_up_light_skin_tone": "261d-1f3fb", + "point_up_medium_light_skin_tone": "261d-1f3fc", + "point_up_medium_skin_tone": "261d-1f3fd", + "point_up_medium_dark_skin_tone": "261d-1f3fe", + "point_up_dark_skin_tone": "261d-1f3ff", + "woman-bouncing-ball_light_skin_tone": "26f9-1f3fb-200d-2640-fe0f", + "basketball_woman_light_skin_tone": "26f9-1f3fb-200d-2640-fe0f", + "woman-bouncing-ball_medium_light_skin_tone": "26f9-1f3fc-200d-2640-fe0f", + "basketball_woman_medium_light_skin_tone": "26f9-1f3fc-200d-2640-fe0f", + "woman-bouncing-ball_medium_skin_tone": "26f9-1f3fd-200d-2640-fe0f", + "basketball_woman_medium_skin_tone": "26f9-1f3fd-200d-2640-fe0f", + "woman-bouncing-ball_medium_dark_skin_tone": "26f9-1f3fe-200d-2640-fe0f", + "basketball_woman_medium_dark_skin_tone": "26f9-1f3fe-200d-2640-fe0f", + "woman-bouncing-ball_dark_skin_tone": "26f9-1f3ff-200d-2640-fe0f", + "basketball_woman_dark_skin_tone": "26f9-1f3ff-200d-2640-fe0f", + 
"man-bouncing-ball_light_skin_tone": "26f9-1f3fb-200d-2642-fe0f", + "basketball_man_light_skin_tone": "26f9-1f3fb-200d-2642-fe0f", + "man-bouncing-ball_medium_light_skin_tone": "26f9-1f3fc-200d-2642-fe0f", + "basketball_man_medium_light_skin_tone": "26f9-1f3fc-200d-2642-fe0f", + "man-bouncing-ball_medium_skin_tone": "26f9-1f3fd-200d-2642-fe0f", + "basketball_man_medium_skin_tone": "26f9-1f3fd-200d-2642-fe0f", + "man-bouncing-ball_medium_dark_skin_tone": "26f9-1f3fe-200d-2642-fe0f", + "basketball_man_medium_dark_skin_tone": "26f9-1f3fe-200d-2642-fe0f", + "man-bouncing-ball_dark_skin_tone": "26f9-1f3ff-200d-2642-fe0f", + "basketball_man_dark_skin_tone": "26f9-1f3ff-200d-2642-fe0f", + "person_with_ball_light_skin_tone": "26f9-1f3fb", + "person_with_ball_medium_light_skin_tone": "26f9-1f3fc", + "person_with_ball_medium_skin_tone": "26f9-1f3fd", + "person_with_ball_medium_dark_skin_tone": "26f9-1f3fe", + "person_with_ball_dark_skin_tone": "26f9-1f3ff", + "fist_light_skin_tone": "270a-1f3fb", + "fist_raised_light_skin_tone": "270a-1f3fb", + "fist_medium_light_skin_tone": "270a-1f3fc", + "fist_raised_medium_light_skin_tone": "270a-1f3fc", + "fist_medium_skin_tone": "270a-1f3fd", + "fist_raised_medium_skin_tone": "270a-1f3fd", + "fist_medium_dark_skin_tone": "270a-1f3fe", + "fist_raised_medium_dark_skin_tone": "270a-1f3fe", + "fist_dark_skin_tone": "270a-1f3ff", + "fist_raised_dark_skin_tone": "270a-1f3ff", + "hand_light_skin_tone": "270b-1f3fb", + "raised_hand_light_skin_tone": "270b-1f3fb", + "hand_medium_light_skin_tone": "270b-1f3fc", + "raised_hand_medium_light_skin_tone": "270b-1f3fc", + "hand_medium_skin_tone": "270b-1f3fd", + "raised_hand_medium_skin_tone": "270b-1f3fd", + "hand_medium_dark_skin_tone": "270b-1f3fe", + "raised_hand_medium_dark_skin_tone": "270b-1f3fe", + "hand_dark_skin_tone": "270b-1f3ff", + "raised_hand_dark_skin_tone": "270b-1f3ff", + "v_light_skin_tone": "270c-1f3fb", + "v_medium_light_skin_tone": "270c-1f3fc", + "v_medium_skin_tone": 
"270c-1f3fd", + "v_medium_dark_skin_tone": "270c-1f3fe", + "v_dark_skin_tone": "270c-1f3ff", + "writing_hand_light_skin_tone": "270d-1f3fb", + "writing_hand_medium_light_skin_tone": "270d-1f3fc", + "writing_hand_medium_skin_tone": "270d-1f3fd", + "writing_hand_medium_dark_skin_tone": "270d-1f3fe", + "writing_hand_dark_skin_tone": "270d-1f3ff", + "mattermost": "mattermost", +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/emoji_search.go b/vendor/github.com/mattermost/mattermost/server/public/model/emoji_search.go new file mode 100644 index 00000000..4d947a11 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/emoji_search.go @@ -0,0 +1,9 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type EmojiSearch struct { + Term string `json:"term"` + PrefixOnly bool `json:"prefix_only"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/feature_flags.go b/vendor/github.com/mattermost/mattermost/server/public/model/feature_flags.go new file mode 100644 index 00000000..64fa8970 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/feature_flags.go @@ -0,0 +1,155 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "reflect" + "strconv" +) + +type FeatureFlags struct { + // Exists only for unit and manual testing. + // When set to a value, will be returned by the ping endpoint. + TestFeature string + // Exists only for testing bool functionality. Boolean feature flags interpret "on" or "true" as true and + // all other values as false. + TestBoolFeature bool + + // Enable the remote cluster service for shared channels. + EnableRemoteClusterService bool + + // Enable DMs and GMs for shared channels. + EnableSharedChannelsDMs bool + + // Enable plugins in shared channels. 
+ EnableSharedChannelsPlugins bool + + // Enable synchronization of channel members in shared channels + EnableSharedChannelsMemberSync bool + + // Enable syncing all users for remote clusters in shared channels + EnableSyncAllUsersForRemoteCluster bool + + // AppsEnabled toggles the Apps framework functionalities both in server and client side + AppsEnabled bool + + PermalinkPreviews bool + + NormalizeLdapDNs bool + + // Enable WYSIWYG text editor + WysiwygEditor bool + + OnboardingTourTips bool + + DeprecateCloudFree bool + + EnableExportDirectDownload bool + + MoveThreadsEnabled bool + + StreamlinedMarketplace bool + + CloudIPFiltering bool + ConsumePostHook bool + + CloudAnnualRenewals bool + CloudDedicatedExportUI bool + + ChannelBookmarks bool + + WebSocketEventScope bool + + NotificationMonitoring bool + + ExperimentalAuditSettingsSystemConsoleUI bool + + CustomProfileAttributes bool + + AttributeBasedAccessControl bool + + ContentFlagging bool + + // Enable AppsForm for Interactive Dialogs instead of legacy dialog implementation + InteractiveDialogAppsForm bool + + EnableMattermostEntry bool + + // Enable mobile SSO SAML code-exchange flow (no tokens in deep links) + MobileSSOCodeExchange bool + + // FEATURE_FLAG_REMOVAL: AutoTranslation - Remove this when MVP is to be released + // Enable auto-translation feature for messages in channels + AutoTranslation bool + + // Enable burn-on-read messages that automatically delete after viewing + BurnOnRead bool + + // FEATURE_FLAG_REMOVAL: EnableAIPluginBridge + EnableAIPluginBridge bool +} + +func (f *FeatureFlags) SetDefaults() { + f.TestFeature = "off" + f.TestBoolFeature = false + f.EnableRemoteClusterService = false + f.EnableSharedChannelsDMs = false + f.EnableSharedChannelsMemberSync = false + f.EnableSyncAllUsersForRemoteCluster = false + f.EnableSharedChannelsPlugins = true + f.AppsEnabled = false + f.NormalizeLdapDNs = false + f.DeprecateCloudFree = false + f.WysiwygEditor = false + f.OnboardingTourTips = 
true + f.EnableExportDirectDownload = false + f.MoveThreadsEnabled = false + f.StreamlinedMarketplace = true + f.CloudIPFiltering = false + f.ConsumePostHook = false + f.CloudAnnualRenewals = false + f.CloudDedicatedExportUI = false + f.ChannelBookmarks = true + f.WebSocketEventScope = true + f.NotificationMonitoring = true + f.ExperimentalAuditSettingsSystemConsoleUI = true + f.CustomProfileAttributes = true + f.AttributeBasedAccessControl = true + f.ContentFlagging = true + f.InteractiveDialogAppsForm = true + f.EnableMattermostEntry = true + + f.MobileSSOCodeExchange = true + + // FEATURE_FLAG_REMOVAL: AutoTranslation - Remove this default when MVP is to be released + f.AutoTranslation = false + + f.BurnOnRead = false + + // FEATURE_FLAG_REMOVAL: EnableAIPluginBridge - Remove this default when MVP is to be released + f.EnableAIPluginBridge = false +} + +// ToMap returns the feature flags as a map[string]string +// Supports boolean and string feature flags. +func (f *FeatureFlags) ToMap() map[string]string { + refStructVal := reflect.ValueOf(*f) + refStructType := reflect.TypeFor[FeatureFlags]() + ret := make(map[string]string) + for i := 0; i < refStructVal.NumField(); i++ { + refFieldVal := refStructVal.Field(i) + if !refFieldVal.IsValid() { + continue + } + refFieldType := refStructType.Field(i) + switch refFieldType.Type.Kind() { + case reflect.Bool: + ret[refFieldType.Name] = strconv.FormatBool(refFieldVal.Bool()) + default: + ret[refFieldType.Name] = refFieldVal.String() + } + } + + return ret +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/file.go b/vendor/github.com/mattermost/mattermost/server/public/model/file.go new file mode 100644 index 00000000..3cc7badb --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/file.go @@ -0,0 +1,20 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import "time" + +const ( + MaxImageSize = int64(6048 * 4032) // 24 megapixels, roughly 36MB as a raw image +) + +type FileUploadResponse struct { + FileInfos []*FileInfo `json:"file_infos"` + ClientIds []string `json:"client_ids"` +} + +type PresignURLResponse struct { + URL string `json:"url"` + Expiration time.Duration `json:"expiration"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/file_info.go b/vendor/github.com/mattermost/mattermost/server/public/model/file_info.go new file mode 100644 index 00000000..7be518c9 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/file_info.go @@ -0,0 +1,177 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "mime" + "net/http" + "path/filepath" + "strings" +) + +const ( + FileinfoSortByCreated = "CreateAt" + FileinfoSortBySize = "Size" +) + +// GetFileInfosOptions contains options for getting FileInfos +type GetFileInfosOptions struct { + // UserIds optionally limits the FileInfos to those created by the given users. + UserIds []string `json:"user_ids"` + // ChannelIds optionally limits the FileInfos to those created in the given channels. + ChannelIds []string `json:"channel_ids"` + // Since optionally limits FileInfos to those created at or after the given time, specified as Unix time in milliseconds. + Since int64 `json:"since"` + // IncludeDeleted if set includes deleted FileInfos. + IncludeDeleted bool `json:"include_deleted"` + // SortBy sorts the FileInfos by this field. The default is to sort by date created. + SortBy string `json:"sort_by"` + // SortDescending changes the sort direction to descending order when true. 
+ SortDescending bool `json:"sort_descending"` +} + +type FileInfo struct { + Id string `json:"id"` + CreatorId string `json:"user_id"` + PostId string `json:"post_id,omitempty"` + // ChannelId is the denormalized value from the corresponding post. Note that this value is + // potentially distinct from the ChannelId provided when the file is first uploaded and + // used to organize the directories in the file store, since in theory that same file + // could be attached to a post from a different channel (or not attached to a post at all). + ChannelId string `json:"channel_id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + Path string `json:"-"` // not sent back to the client + ThumbnailPath string `json:"-"` // not sent back to the client + PreviewPath string `json:"-"` // not sent back to the client + Name string `json:"name"` + Extension string `json:"extension"` + Size int64 `json:"size"` + MimeType string `json:"mime_type"` + Width int `json:"width,omitempty"` + Height int `json:"height,omitempty"` + HasPreviewImage bool `json:"has_preview_image,omitempty"` + MiniPreview *[]byte `json:"mini_preview"` // declared as *[]byte to avoid postgres/mysql differences in deserialization + Content string `json:"-"` + RemoteId *string `json:"remote_id"` + Archived bool `json:"archived"` +} + +func (fi *FileInfo) Auditable() map[string]any { + return map[string]any{ + "id": fi.Id, + "creator_id": fi.CreatorId, + "post_id": fi.PostId, + "channel_id": fi.ChannelId, + "create_at": fi.CreateAt, + "update_at": fi.UpdateAt, + "delete_at": fi.DeleteAt, + "name": fi.Name, + "extension": fi.Extension, + "size": fi.Size, + } +} + +func (fi *FileInfo) PreSave() { + if fi.Id == "" { + fi.Id = NewId() + } + + if fi.CreateAt == 0 { + fi.CreateAt = GetMillis() + } + + if fi.UpdateAt < fi.CreateAt { + fi.UpdateAt = fi.CreateAt + } + + if fi.RemoteId == nil { + fi.RemoteId = NewPointer("") + } +} + +func (fi *FileInfo) 
IsValid() *AppError { + if !IsValidId(fi.Id) { + return NewAppError("FileInfo.IsValid", "model.file_info.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(fi.CreatorId) && (fi.CreatorId != "nouser" && fi.CreatorId != BookmarkFileOwner) { + return NewAppError("FileInfo.IsValid", "model.file_info.is_valid.user_id.app_error", nil, "id="+fi.Id, http.StatusBadRequest) + } + + if fi.PostId != "" && !IsValidId(fi.PostId) { + return NewAppError("FileInfo.IsValid", "model.file_info.is_valid.post_id.app_error", nil, "id="+fi.Id, http.StatusBadRequest) + } + + if fi.CreateAt == 0 { + return NewAppError("FileInfo.IsValid", "model.file_info.is_valid.create_at.app_error", nil, "id="+fi.Id, http.StatusBadRequest) + } + + if fi.UpdateAt == 0 { + return NewAppError("FileInfo.IsValid", "model.file_info.is_valid.update_at.app_error", nil, "id="+fi.Id, http.StatusBadRequest) + } + + if fi.Path == "" { + return NewAppError("FileInfo.IsValid", "model.file_info.is_valid.path.app_error", nil, "id="+fi.Id, http.StatusBadRequest) + } + + return nil +} + +func (fi *FileInfo) IsImage() bool { + return strings.HasPrefix(fi.MimeType, "image") +} + +func (fi *FileInfo) IsSvg() bool { + return fi.MimeType == "image/svg+xml" +} + +func NewInfo(name string) *FileInfo { + info := &FileInfo{ + Name: name, + } + + extension := strings.ToLower(filepath.Ext(name)) + info.MimeType = mime.TypeByExtension(extension) + + if extension != "" && extension[0] == '.' 
{ + // The client expects a file extension without the leading period + info.Extension = extension[1:] + } else { + info.Extension = extension + } + + return info +} + +func GetEtagForFileInfos(infos []*FileInfo) string { + if len(infos) == 0 { + return Etag() + } + + var maxUpdateAt int64 + + for _, info := range infos { + if info.UpdateAt > maxUpdateAt { + maxUpdateAt = info.UpdateAt + } + } + + return Etag(infos[0].PostId, maxUpdateAt) +} + +func (fi *FileInfo) MakeContentInaccessible() { + if fi == nil { + return + } + + fi.Archived = true + fi.Content = "" + fi.HasPreviewImage = false + fi.MiniPreview = nil + fi.Path = "" + fi.PreviewPath = "" + fi.ThumbnailPath = "" +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/file_info_list.go b/vendor/github.com/mattermost/mattermost/server/public/model/file_info_list.go new file mode 100644 index 00000000..3e7cb4fc --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/file_info_list.go @@ -0,0 +1,113 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "sort" +) + +type FileInfoList struct { + Order []string `json:"order"` + FileInfos map[string]*FileInfo `json:"file_infos"` + NextFileInfoId string `json:"next_file_info_id"` + PrevFileInfoId string `json:"prev_file_info_id"` + // If there are inaccessible files, FirstInaccessibleFileTime is the time of the latest inaccessible file + FirstInaccessibleFileTime int64 `json:"first_inaccessible_file_time"` +} + +func NewFileInfoList() *FileInfoList { + return &FileInfoList{ + Order: make([]string, 0), + FileInfos: make(map[string]*FileInfo), + NextFileInfoId: "", + PrevFileInfoId: "", + } +} + +func (o *FileInfoList) ToSlice() []*FileInfo { + var fileInfos []*FileInfo + for _, id := range o.Order { + fileInfos = append(fileInfos, o.FileInfos[id]) + } + return fileInfos +} + +func (o *FileInfoList) MakeNonNil() { + if o.Order == nil { + o.Order = make([]string, 0) + } + + if o.FileInfos == nil { + o.FileInfos = make(map[string]*FileInfo) + } +} + +func (o *FileInfoList) AddOrder(id string) { + if o.Order == nil { + o.Order = make([]string, 0, 128) + } + + o.Order = append(o.Order, id) +} + +func (o *FileInfoList) AddFileInfo(fileInfo *FileInfo) { + if o.FileInfos == nil { + o.FileInfos = make(map[string]*FileInfo) + } + + o.FileInfos[fileInfo.Id] = fileInfo +} + +func (o *FileInfoList) UniqueOrder() { + keys := make(map[string]bool) + order := []string{} + for _, fileInfoId := range o.Order { + if _, value := keys[fileInfoId]; !value { + keys[fileInfoId] = true + order = append(order, fileInfoId) + } + } + + o.Order = order +} + +func (o *FileInfoList) Extend(other *FileInfoList) { + for fileInfoId := range other.FileInfos { + o.AddFileInfo(other.FileInfos[fileInfoId]) + } + + for _, fileInfoId := range other.Order { + o.AddOrder(fileInfoId) + } + + o.UniqueOrder() +} + +func (o *FileInfoList) SortByCreateAt() { + sort.Slice(o.Order, func(i, j int) bool { + return o.FileInfos[o.Order[i]].CreateAt > o.FileInfos[o.Order[j]].CreateAt + }) +} 
+ +func (o *FileInfoList) Etag() string { + id := "0" + var t int64 + + for _, v := range o.FileInfos { + if v.UpdateAt > t { + t = v.UpdateAt + id = v.Id + } else if v.UpdateAt == t && v.Id > id { + t = v.UpdateAt + id = v.Id + } + } + + orderId := "" + if len(o.Order) > 0 { + orderId = o.Order[0] + } + + return Etag(orderId, id, t) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/file_info_search_results.go b/vendor/github.com/mattermost/mattermost/server/public/model/file_info_search_results.go new file mode 100644 index 00000000..fddbffd4 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/file_info_search_results.go @@ -0,0 +1,18 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type FileInfoSearchMatches map[string][]string + +type FileInfoSearchResults struct { + *FileInfoList + Matches FileInfoSearchMatches `json:"matches"` +} + +func MakeFileInfoSearchResults(fileInfos *FileInfoList, matches FileInfoSearchMatches) *FileInfoSearchResults { + return &FileInfoSearchResults{ + fileInfos, + matches, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/github_release.go b/vendor/github.com/mattermost/mattermost/server/public/model/github_release.go new file mode 100644 index 00000000..931b9fda --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/github_release.go @@ -0,0 +1,26 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" +) + +type GithubReleaseInfo struct { + Id int `json:"id"` + TagName string `json:"tag_name"` + Name string `json:"name"` + CreatedAt string `json:"created_at"` + PublishedAt string `json:"published_at"` + Body string `json:"body"` + Url string `json:"html_url"` +} + +func (g *GithubReleaseInfo) IsValid() *AppError { + if g.Id == 0 { + return NewAppError("GithubReleaseInfo.IsValid", NoTranslation, nil, "empty ID", http.StatusInternalServerError) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/gitlab.go b/vendor/github.com/mattermost/mattermost/server/public/model/gitlab.go new file mode 100644 index 00000000..c6233f13 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/gitlab.go @@ -0,0 +1,8 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +const ( + UserAuthServiceGitlab = "gitlab" +) diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/group.go b/vendor/github.com/mattermost/mattermost/server/public/model/group.go new file mode 100644 index 00000000..191329c6 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/group.go @@ -0,0 +1,310 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" + "regexp" + "strings" +) + +const ( + GroupSourceLdap GroupSource = "ldap" + GroupSourceCustom GroupSource = "custom" + + // plugin groups must prefix their source with this + GroupSourcePluginPrefix GroupSource = "plugin_" + + GroupNameMaxLength = 64 + GroupSourceMaxLength = 64 + GroupDisplayNameMaxLength = 128 + GroupDescriptionMaxLength = 1024 + GroupRemoteIDMaxLength = 48 +) + +type GroupSource string + +type Group struct { + Id string `json:"id"` + Name *string `json:"name,omitempty"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + Source GroupSource `json:"source"` + RemoteId *string `json:"remote_id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + HasSyncables bool `db:"-" json:"has_syncables"` + MemberCount *int `db:"-" json:"member_count,omitempty"` + AllowReference bool `json:"allow_reference"` + ChannelMemberCount *int `db:"-" json:"channel_member_count,omitempty"` + ChannelMemberTimezonesCount *int `db:"-" json:"channel_member_timezones_count,omitempty"` + MemberIDs []string `db:"-" json:"member_ids"` +} + +func (group *Group) Auditable() map[string]any { + return map[string]any{ + "id": group.Id, + "source": group.Source, + "remote_id": group.GetRemoteId(), + "create_at": group.CreateAt, + "update_at": group.UpdateAt, + "delete_at": group.DeleteAt, + "has_syncables": group.HasSyncables, + "member_count": group.GetMemberCount(), + "allow_reference": group.AllowReference, + } +} + +func (group *Group) LogClone() any { + return map[string]any{ + "id": group.Id, + "name": group.GetName(), + "display_name": group.DisplayName, + "source": group.Source, + "remote_id": group.GetRemoteId(), + "create_at": group.CreateAt, + "update_at": group.UpdateAt, + "delete_at": group.DeleteAt, + "has_syncables": group.HasSyncables, + "member_count": group.GetMemberCount(), + "allow_reference": group.AllowReference, + } +} + +type 
GroupWithUserIds struct { + Group + UserIds []string `json:"user_ids"` +} + +func (group *GroupWithUserIds) Auditable() map[string]any { + return map[string]any{ + "id": group.Id, + "source": group.Source, + "remote_id": group.GetRemoteId(), + "create_at": group.CreateAt, + "update_at": group.UpdateAt, + "delete_at": group.DeleteAt, + "has_syncables": group.HasSyncables, + "member_count": group.GetMemberCount(), + "allow_reference": group.AllowReference, + "user_ids": group.UserIds, + } +} + +type GroupWithSchemeAdmin struct { + Group + SchemeAdmin *bool `db:"SyncableSchemeAdmin" json:"scheme_admin,omitempty"` +} + +type GroupsAssociatedToChannelWithSchemeAdmin struct { + ChannelId string `json:"channel_id"` + Group + SchemeAdmin *bool `db:"SyncableSchemeAdmin" json:"scheme_admin,omitempty"` +} +type GroupsAssociatedToChannel struct { + ChannelId string `json:"channel_id"` + Groups []*GroupWithSchemeAdmin `json:"groups"` +} + +type GroupPatch struct { + Name *string `json:"name"` + DisplayName *string `json:"display_name"` + Description *string `json:"description"` + AllowReference *bool `json:"allow_reference"` + // For security reasons (including preventing unintended LDAP group synchronization) do no allow a Group's RemoteId or Source field to be + // included in patches. +} + +type LdapGroupSearchOpts struct { + Q string + IsLinked *bool + IsConfigured *bool +} + +type GroupSearchOpts struct { + Q string + NotAssociatedToTeam string + NotAssociatedToChannel string + IncludeMemberCount bool + FilterAllowReference bool + PageOpts *PageOpts + Since int64 + Source GroupSource + + // FilterParentTeamPermitted filters the groups to the intersect of the + // set associated to the parent team and those returned by the query. + // If the parent team is not group-constrained or if NotAssociatedToChannel + // is not set then this option is ignored. 
+ FilterParentTeamPermitted bool + + // FilterHasMember filters the groups to the intersect of the + // set returned by the query and those that have the given user as a member. + FilterHasMember string + + IncludeChannelMemberCount string + IncludeTimezones bool + IncludeMemberIDs bool + + // Include archived groups + IncludeArchived bool + + // Only return archived groups + FilterArchived bool + + // OnlySyncableSources filters the groups to only those that are syncable + OnlySyncableSources bool +} + +type GetGroupOpts struct { + IncludeMemberCount bool + IncludeMemberIDs bool +} + +type PageOpts struct { + Page int + PerPage int +} + +type GroupStats struct { + GroupID string `json:"group_id"` + TotalMemberCount int64 `json:"total_member_count"` +} + +type GroupModifyMembers struct { + UserIds []string `json:"user_ids"` +} + +func (group *GroupModifyMembers) Auditable() map[string]any { + return map[string]any{ + "user_ids": group.UserIds, + } +} + +func (group *Group) Patch(patch *GroupPatch) { + if patch.Name != nil { + group.Name = patch.Name + } + if patch.DisplayName != nil { + group.DisplayName = *patch.DisplayName + } + if patch.Description != nil { + group.Description = *patch.Description + } + if patch.AllowReference != nil { + group.AllowReference = *patch.AllowReference + } +} + +func (group *Group) IsValidForCreate() *AppError { + appErr := group.IsValidName() + if appErr != nil { + return appErr + } + + if l := len(group.DisplayName); l == 0 || l > GroupDisplayNameMaxLength { + return NewAppError("Group.IsValidForCreate", "model.group.display_name.app_error", map[string]any{"GroupDisplayNameMaxLength": GroupDisplayNameMaxLength}, "", http.StatusBadRequest) + } + + if len(group.Description) > GroupDescriptionMaxLength { + return NewAppError("Group.IsValidForCreate", "model.group.description.app_error", map[string]any{"GroupDescriptionMaxLength": GroupDescriptionMaxLength}, "", http.StatusBadRequest) + } + + isValidSource := false + if group.Source 
== GroupSourceLdap || + group.Source == GroupSourceCustom || + strings.HasPrefix(string(group.Source), string(GroupSourcePluginPrefix)) { + isValidSource = true + } + + if !isValidSource { + return NewAppError("Group.IsValidForCreate", "model.group.source.app_error", nil, "", http.StatusBadRequest) + } + + if (group.GetRemoteId() == "" && group.requiresRemoteId()) || len(group.GetRemoteId()) > GroupRemoteIDMaxLength { + return NewAppError("Group.IsValidForCreate", "model.group.remote_id.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (group *Group) requiresRemoteId() bool { + return group.Source == GroupSourceLdap || strings.HasPrefix(string(group.Source), string(GroupSourcePluginPrefix)) +} + +func GetSyncableGroupSources() []GroupSource { + return []GroupSource{GroupSourceLdap} +} + +func GetSyncableGroupSourcePrefixes() []GroupSource { + return []GroupSource{GroupSourcePluginPrefix} +} + +func (group *Group) IsSyncable() bool { + return group.Source == GroupSourceLdap || strings.HasPrefix(string(group.Source), string(GroupSourcePluginPrefix)) +} + +func (group *Group) IsValidForUpdate() *AppError { + if !IsValidId(group.Id) { + return NewAppError("Group.IsValidForUpdate", "app.group.id.app_error", nil, "", http.StatusBadRequest) + } + if group.CreateAt == 0 { + return NewAppError("Group.IsValidForUpdate", "model.group.create_at.app_error", nil, "", http.StatusBadRequest) + } + if group.UpdateAt == 0 { + return NewAppError("Group.IsValidForUpdate", "model.group.update_at.app_error", nil, "", http.StatusBadRequest) + } + if appErr := group.IsValidForCreate(); appErr != nil { + return appErr + } + return nil +} + +var validGroupnameChars = regexp.MustCompile(`^[a-z0-9\.\-_]+$`) + +func (group *Group) IsValidName() *AppError { + if group.Name == nil { + if group.AllowReference { + return NewAppError("Group.IsValidName", "model.group.name.app_error", map[string]any{"GroupNameMaxLength": GroupNameMaxLength}, "", http.StatusBadRequest) + } + } 
else { + if l := len(*group.Name); l == 0 || l > GroupNameMaxLength { + return NewAppError("Group.IsValidName", "model.group.name.invalid_length.app_error", map[string]any{"GroupNameMaxLength": GroupNameMaxLength}, "", http.StatusBadRequest) + } + + if *group.Name == UserNotifyAll || *group.Name == ChannelMentionsNotifyProp || *group.Name == UserNotifyHere { + return NewAppError("IsValidName", "model.group.name.reserved_name.app_error", nil, "", http.StatusBadRequest) + } + + if !validGroupnameChars.MatchString(*group.Name) { + return NewAppError("Group.IsValidName", "model.group.name.invalid_chars.app_error", nil, "", http.StatusBadRequest) + } + } + return nil +} + +func (group *Group) GetName() string { + return SafeDereference(group.Name) +} + +func (group *Group) GetRemoteId() string { + return SafeDereference(group.RemoteId) +} + +func (group *Group) GetMemberCount() int { + return SafeDereference(group.MemberCount) +} + +type GroupsWithCount struct { + Groups []*Group `json:"groups"` + TotalCount int64 `json:"total_count"` +} + +type CreateDefaultMembershipParams struct { + Since int64 + ReAddRemovedMembers bool + ScopedUserID *string + ScopedTeamID *string + ScopedChannelID *string +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/group_member.go b/vendor/github.com/mattermost/mattermost/server/public/model/group_member.go new file mode 100644 index 00000000..9cd4e9a5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/group_member.go @@ -0,0 +1,28 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import "net/http" + +type GroupMember struct { + GroupId string `json:"group_id"` + UserId string `json:"user_id"` + CreateAt int64 `json:"create_at"` + DeleteAt int64 `json:"delete_at"` +} + +func (gm *GroupMember) IsValid() *AppError { + if !IsValidId(gm.GroupId) { + return NewAppError("GroupMember.IsValid", "model.group_member.group_id.app_error", nil, "", http.StatusBadRequest) + } + if !IsValidId(gm.UserId) { + return NewAppError("GroupMember.IsValid", "model.group_member.user_id.app_error", nil, "", http.StatusBadRequest) + } + return nil +} + +type GroupMemberList struct { + Members []*User `json:"members"` + Count int `json:"total_member_count"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/group_syncable.go b/vendor/github.com/mattermost/mattermost/server/public/model/group_syncable.go new file mode 100644 index 00000000..28df18c4 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/group_syncable.go @@ -0,0 +1,198 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "fmt" + "net/http" +) + +type GroupSyncableType string + +const ( + GroupSyncableTypeTeam GroupSyncableType = "Team" + GroupSyncableTypeChannel GroupSyncableType = "Channel" +) + +func (gst GroupSyncableType) String() string { + return string(gst) +} + +type GroupSyncable struct { + GroupId string `json:"group_id"` + + // SyncableId represents the Id of the model that is being synced with the group, for example a ChannelId or + // TeamId. 
+ SyncableId string `db:"-" json:"-"` + + AutoAdd bool `json:"auto_add"` + SchemeAdmin bool `json:"scheme_admin"` + CreateAt int64 `json:"create_at"` + DeleteAt int64 `json:"delete_at"` + UpdateAt int64 `json:"update_at"` + Type GroupSyncableType `db:"-" json:"-"` + + // Values joined in from the associated team and/or channel + ChannelDisplayName string `db:"-" json:"-"` + TeamDisplayName string `db:"-" json:"-"` + TeamType string `db:"-" json:"-"` + ChannelType string `db:"-" json:"-"` + TeamID string `db:"-" json:"-"` +} + +func (syncable *GroupSyncable) Auditable() map[string]any { + return map[string]any{ + "group_id": syncable.GroupId, + "syncable_id": syncable.SyncableId, + "auto_add": syncable.AutoAdd, + "scheme_admin": syncable.SchemeAdmin, + "create_at": syncable.CreateAt, + "delete_at": syncable.DeleteAt, + "update_at": syncable.UpdateAt, + "type": syncable.Type, + "channel_display_name": syncable.ChannelDisplayName, + "team_display_name": syncable.TeamDisplayName, + "team_type": syncable.TeamType, + "channel_type": syncable.ChannelType, + "team_id": syncable.TeamID, + } +} + +func (syncable *GroupSyncable) IsValid() *AppError { + if !IsValidId(syncable.GroupId) { + return NewAppError("GroupSyncable.SyncableIsValid", "model.group_syncable.group_id.app_error", nil, "", http.StatusBadRequest) + } + if !IsValidId(syncable.SyncableId) { + return NewAppError("GroupSyncable.SyncableIsValid", "model.group_syncable.syncable_id.app_error", nil, "", http.StatusBadRequest) + } + return nil +} + +func (syncable *GroupSyncable) UnmarshalJSON(b []byte) error { + var kvp map[string]any + err := json.Unmarshal(b, &kvp) + if err != nil { + return err + } + var channelId string + var teamId string + for key, value := range kvp { + switch key { + case "team_id": + teamId = value.(string) + case "channel_id": + channelId = value.(string) + case "group_id": + syncable.GroupId = value.(string) + case "auto_add": + syncable.AutoAdd = value.(bool) + default: + } + } + if 
channelId != "" { + syncable.TeamID = teamId + syncable.SyncableId = channelId + syncable.Type = GroupSyncableTypeChannel + } else { + syncable.SyncableId = teamId + syncable.Type = GroupSyncableTypeTeam + } + return nil +} + +func (syncable *GroupSyncable) MarshalJSON() ([]byte, error) { + type Alias GroupSyncable + switch syncable.Type { + case GroupSyncableTypeTeam: + return json.Marshal(&struct { + TeamID string `json:"team_id"` + TeamDisplayName string `json:"team_display_name,omitempty"` + TeamType string `json:"team_type,omitempty"` + Type GroupSyncableType `json:"type,omitempty"` + *Alias + }{ + TeamDisplayName: syncable.TeamDisplayName, + TeamType: syncable.TeamType, + TeamID: syncable.SyncableId, + Type: syncable.Type, + Alias: (*Alias)(syncable), + }) + case GroupSyncableTypeChannel: + return json.Marshal(&struct { + ChannelID string `json:"channel_id"` + ChannelDisplayName string `json:"channel_display_name,omitempty"` + ChannelType string `json:"channel_type,omitempty"` + Type GroupSyncableType `json:"type,omitempty"` + + TeamID string `json:"team_id,omitempty"` + TeamDisplayName string `json:"team_display_name,omitempty"` + TeamType string `json:"team_type,omitempty"` + + *Alias + }{ + ChannelID: syncable.SyncableId, + ChannelDisplayName: syncable.ChannelDisplayName, + ChannelType: syncable.ChannelType, + Type: syncable.Type, + + TeamID: syncable.TeamID, + TeamDisplayName: syncable.TeamDisplayName, + TeamType: syncable.TeamType, + + Alias: (*Alias)(syncable), + }) + default: + return nil, fmt.Errorf("unknown syncable type: %s", syncable.Type) + } +} + +type GroupSyncablePatch struct { + AutoAdd *bool `json:"auto_add"` + SchemeAdmin *bool `json:"scheme_admin"` +} + +func (syncable *GroupSyncablePatch) Auditable() map[string]any { + return map[string]any{ + "auto_add": syncable.AutoAdd, + "scheme_admin": syncable.SchemeAdmin, + } +} + +func (syncable *GroupSyncable) Patch(patch *GroupSyncablePatch) { + if patch.AutoAdd != nil { + syncable.AutoAdd = 
*patch.AutoAdd + } + if patch.SchemeAdmin != nil { + syncable.SchemeAdmin = *patch.SchemeAdmin + } +} + +type UserTeamIDPair struct { + UserID string + TeamID string +} + +type UserChannelIDPair struct { + UserID string + ChannelID string +} + +func NewGroupTeam(groupID, teamID string, autoAdd bool) *GroupSyncable { + return &GroupSyncable{ + GroupId: groupID, + SyncableId: teamID, + Type: GroupSyncableTypeTeam, + AutoAdd: autoAdd, + } +} + +func NewGroupChannel(groupID, channelID string, autoAdd bool) *GroupSyncable { + return &GroupSyncable{ + GroupId: groupID, + SyncableId: channelID, + Type: GroupSyncableTypeChannel, + AutoAdd: autoAdd, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/guest_invite.go b/vendor/github.com/mattermost/mattermost/server/public/model/guest_invite.go new file mode 100644 index 00000000..c4171306 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/guest_invite.go @@ -0,0 +1,46 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" +) + +type GuestsInvite struct { + Emails []string `json:"emails"` + Channels []string `json:"channels"` + Message string `json:"message"` +} + +func (i *GuestsInvite) Auditable() map[string]any { + return map[string]any{ + "emails": i.Emails, + "channels": i.Channels, + } +} + +// IsValid validates the user and returns an error if it isn't configured +// correctly. 
+func (i *GuestsInvite) IsValid() *AppError { + if len(i.Emails) == 0 { + return NewAppError("GuestsInvite.IsValid", "model.guest.is_valid.emails.app_error", nil, "", http.StatusBadRequest) + } + + for _, email := range i.Emails { + if len(email) > UserEmailMaxLength || email == "" || !IsValidEmail(email) { + return NewAppError("GuestsInvite.IsValid", "model.guest.is_valid.email.app_error", nil, "email="+email, http.StatusBadRequest) + } + } + + if len(i.Channels) == 0 { + return NewAppError("GuestsInvite.IsValid", "model.guest.is_valid.channels.app_error", nil, "", http.StatusBadRequest) + } + + for _, channel := range i.Channels { + if len(channel) != 26 { + return NewAppError("GuestsInvite.IsValid", "model.guest.is_valid.channel.app_error", nil, "channel="+channel, http.StatusBadRequest) + } + } + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/hosted_customer.go b/vendor/github.com/mattermost/mattermost/server/public/model/hosted_customer.go new file mode 100644 index 00000000..ffaec15c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/hosted_customer.go @@ -0,0 +1,10 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type SubscribeNewsletterRequest struct { + Email string `json:"email"` + ServerID string `json:"server_id"` + SubscribedContent string `json:"subscribed_content"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/incoming_webhook.go b/vendor/github.com/mattermost/mattermost/server/public/model/incoming_webhook.go new file mode 100644 index 00000000..66d09b7c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/incoming_webhook.go @@ -0,0 +1,208 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
// IncomingWebhook represents a webhook endpoint that external services can
// POST to in order to create posts in a channel.
type IncomingWebhook struct {
	Id            string `json:"id"`
	CreateAt      int64  `json:"create_at"`
	UpdateAt      int64  `json:"update_at"`
	DeleteAt      int64  `json:"delete_at"`
	UserId        string `json:"user_id"`
	ChannelId     string `json:"channel_id"`
	TeamId        string `json:"team_id"`
	DisplayName   string `json:"display_name"`
	Description   string `json:"description"`
	Username      string `json:"username"`
	IconURL       string `json:"icon_url"`
	ChannelLocked bool   `json:"channel_locked"`
}

// Auditable returns the webhook as a map for audit logging.
func (o *IncomingWebhook) Auditable() map[string]any {
	return map[string]any{
		"id":           o.Id,
		"create_at":    o.CreateAt,
		"update_at":    o.UpdateAt,
		"delete_at":    o.DeleteAt,
		"user_id":      o.UserId,
		"channel_id":   o.ChannelId,
		"team_id":      o.TeamId,
		"display_name": o.DisplayName,
		"description":  o.Description,
		"username":     o.Username,
		// Fix: the key previously carried a stray trailing colon ("icon_url:"),
		// hiding the icon URL from audit-log consumers looking up "icon_url".
		"icon_url":       o.IconURL,
		"channel_locked": o.ChannelLocked,
	}
}
NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.update_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !IsValidId(o.UserId) { + return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.ChannelId) { + return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.channel_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.TeamId) { + return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.team_id.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.DisplayName) > 64 { + return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.display_name.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Description) > 500 { + return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.description.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Username) > 64 { + return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.username.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.IconURL) > 1024 { + return NewAppError("IncomingWebhook.IsValid", "model.incoming_hook.icon_url.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (o *IncomingWebhook) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + o.CreateAt = GetMillis() + o.UpdateAt = o.CreateAt +} + +func (o *IncomingWebhook) PreUpdate() { + o.UpdateAt = GetMillis() +} + +// escapeControlCharsFromPayload escapes control chars (\n, \t) from a byte slice. +// Context: +// JSON strings are not supposed to contain control characters such as \n, \t, +// ... but some incoming webhooks might still send invalid JSON and we want to +// try to handle that. 
// escapeControlCharsRe matches `"<key>": "<value>"` pairs for the well-known
// Slack-compatibility keys whose values may contain raw control characters.
// The pattern reads: (?s) lets the character class span newlines; it matches
// a known key, optional whitespace around the colon, then a double-quoted
// value made of escaped quotes (\") or any non-quote character.
//
// Compiled once at package init instead of on every call.
var escapeControlCharsRe = regexp.MustCompile(
	`(?s)"(text|fallback|pretext|author_name|title|value)"\s*:\s*"(\\"|[^"])*"`)

// escapeControlCharsFromPayload escapes raw control characters (\n, \t) found
// inside the string values of known keys. JSON strings must not contain
// unescaped control characters, but some incoming webhooks send them anyway;
// this rewrites e.g.
//
//	{"text": "line one
//	line two"}
//
// into {"text": "line one\nline two"} so decoding can succeed.
func escapeControlCharsFromPayload(by []byte) []byte {
	return escapeControlCharsRe.ReplaceAllFunc(by, func(match []byte) []byte {
		// bytes.ReplaceAll is a no-op when the byte is absent, so the previous
		// bytes.Contains pre-checks were redundant.
		match = bytes.ReplaceAll(match, []byte("\n"), []byte("\\n"))
		return bytes.ReplaceAll(match, []byte("\t"), []byte("\\t"))
	})
}
io.Reader) (*IncomingWebhookRequest, *AppError) { + buf := new(bytes.Buffer) + buf.ReadFrom(data) + by := buf.Bytes() + + // Try to decode the JSON data. Only if it fails, try to escape control + // characters from the strings contained in the JSON data. + o, err := decodeIncomingWebhookRequest(by) + if err != nil { + o, err = decodeIncomingWebhookRequest(escapeControlCharsFromPayload(by)) + if err != nil { + return nil, NewAppError("IncomingWebhookRequestFromJSON", "model.incoming_hook.parse_data.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + } + + o.Attachments = StringifySlackFieldValue(o.Attachments) + + return o, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/initial_load.go b/vendor/github.com/mattermost/mattermost/server/public/model/initial_load.go new file mode 100644 index 00000000..5ecddda2 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/initial_load.go @@ -0,0 +1,14 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type InitialLoad struct { + User *User `json:"user"` + TeamMembers []*TeamMember `json:"team_members"` + Teams []*Team `json:"teams"` + Preferences Preferences `json:"preferences"` + ClientCfg map[string]string `json:"client_cfg"` + LicenseCfg map[string]string `json:"license_cfg"` + NoAccounts bool `json:"no_accounts"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/integration_action.go b/vendor/github.com/mattermost/mattermost/server/public/model/integration_action.go new file mode 100644 index 00000000..ae599c5f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/integration_action.go @@ -0,0 +1,958 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "crypto" + "crypto/aes" + "crypto/cipher" + "crypto/ecdsa" + "crypto/rand" + "encoding/asn1" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "math/big" + "net/http" + "reflect" + "slices" + "strconv" + "strings" + "time" + + "github.com/hashicorp/go-multierror" + "github.com/pkg/errors" +) + +const ( + PostActionTypeButton = "button" + PostActionTypeSelect = "select" + DialogTitleMaxLength = 24 + DialogElementDisplayNameMaxLength = 24 + DialogElementNameMaxLength = 300 + DialogElementHelpTextMaxLength = 150 + DialogElementTextMaxLength = 150 + DialogElementTextareaMaxLength = 3000 + DialogElementSelectMaxLength = 3000 + DialogElementBoolMaxLength = 150 + DefaultTimeIntervalMinutes = 60 // Default time interval for DateTime fields + + // Go date/time format constants + ISODateFormat = "2006-01-02" // YYYY-MM-DD + ISODateTimeFormat = "2006-01-02T15:04:05Z" // RFC3339 UTC + ISODateTimeWithTimezoneFormat = "2006-01-02T15:04:05-07:00" // RFC3339 with timezone + ISODateTimeNoTimezoneFormat = "2006-01-02T15:04:05" // ISO datetime without timezone + ISODateTimeNoSecondsFormat = "2006-01-02T15:04" // ISO datetime without seconds +) + +// Common datetime formats used by both date and datetime validation +var commonDateTimeFormats = []string{ + ISODateTimeFormat, // RFC3339 UTC + ISODateTimeWithTimezoneFormat, // RFC3339 with timezone + ISODateTimeNoTimezoneFormat, // ISO datetime without timezone + ISODateTimeNoSecondsFormat, // ISO datetime without seconds +} + +var PostActionRetainPropKeys = []string{PostPropsFromWebhook, PostPropsOverrideUsername, PostPropsOverrideIconURL} + +type DoPostActionRequest struct { + SelectedOption string `json:"selected_option,omitempty"` + Cookie string `json:"cookie,omitempty"` +} + +const ( + PostActionDataSourceUsers = "users" + PostActionDataSourceChannels = "channels" +) + +type PostAction struct { + // A unique Action ID. If not set, generated automatically. 
+ Id string `json:"id,omitempty"` + + // The type of the interactive element. Currently supported are + // "select" and "button". + Type string `json:"type,omitempty"` + + // The text on the button, or in the select placeholder. + Name string `json:"name,omitempty"` + + // If the action is disabled. + Disabled bool `json:"disabled,omitempty"` + + // Style defines a text and border style. + // Supported values are "default", "primary", "success", "good", "warning", "danger" + // and any hex color. + Style string `json:"style,omitempty"` + + // DataSource indicates the data source for the select action. If left + // empty, the select is populated from Options. Other supported values + // are "users" and "channels". + DataSource string `json:"data_source,omitempty"` + + // Options contains the values listed in a select dropdown on the post. + Options []*PostActionOptions `json:"options,omitempty"` + + // DefaultOption contains the option, if any, that will appear as the + // default selection in a select box. It has no effect when used with + // other types of actions. + DefaultOption string `json:"default_option,omitempty"` + + // Defines the interaction with the backend upon a user action. + // Integration contains Context, which is private plugin data; + // Integrations are stripped from Posts when they are sent to the + // client, or are encrypted in a Cookie. + Integration *PostActionIntegration `json:"integration,omitempty"` + Cookie string `json:"cookie,omitempty" db:"-"` +} + +// IsValid validates the action and returns an error if it is invalid. 
+func (p *PostAction) IsValid() error { + var multiErr *multierror.Error + + if p.Name == "" { + multiErr = multierror.Append(multiErr, fmt.Errorf("action must have a name")) + } + + if p.Style != "" { + validStyles := []string{"default", "primary", "success", "good", "warning", "danger"} + // If not a predefined style, check if it's a hex color + if !slices.Contains(validStyles, p.Style) && !hexColorRegex.MatchString(p.Style) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid style '%s' - must be one of [default, primary, success, good, warning, danger] or a hex color", p.Style)) + } + } + + switch p.Type { + case PostActionTypeButton: + if len(p.Options) > 0 { + multiErr = multierror.Append(multiErr, fmt.Errorf("button action must not have options")) + } + if p.DataSource != "" { + multiErr = multierror.Append(multiErr, fmt.Errorf("button action must not have a data source")) + } + case PostActionTypeSelect: + if p.DataSource != "" { + validSources := []string{PostActionDataSourceUsers, PostActionDataSourceChannels} + if !slices.Contains(validSources, p.DataSource) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid data_source '%s' for select action", p.DataSource)) + } + + if len(p.Options) > 0 { + multiErr = multierror.Append(multiErr, fmt.Errorf("select action cannot have both DataSource and Options set")) + } + } else { + if len(p.Options) == 0 { + multiErr = multierror.Append(multiErr, fmt.Errorf("select action must have either DataSource or Options set")) + } else { + for i, opt := range p.Options { + if opt == nil { + multiErr = multierror.Append(multiErr, fmt.Errorf("select action contains nil option")) + continue + } + if err := opt.IsValid(); err != nil { + multiErr = multierror.Append(multiErr, multierror.Prefix(err, fmt.Sprintf("option at index %d is invalid:", i))) + } + } + } + } + default: + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid action type: must be '%s' or '%s'", PostActionTypeButton, 
PostActionTypeSelect)) + } + + if p.Integration == nil { + multiErr = multierror.Append(multiErr, fmt.Errorf("action must have integration settings")) + } else { + if p.Integration.URL == "" { + multiErr = multierror.Append(multiErr, fmt.Errorf("action must have an integration URL")) + } + if !(strings.HasPrefix(p.Integration.URL, "/plugins/") || strings.HasPrefix(p.Integration.URL, "plugins/") || IsValidHTTPURL(p.Integration.URL)) { + multiErr = multierror.Append(multiErr, fmt.Errorf("action must have an valid integration URL")) + } + } + + return multiErr.ErrorOrNil() +} + +func (p *PostAction) Equals(input *PostAction) bool { + if p.Id != input.Id { + return false + } + + if p.Type != input.Type { + return false + } + + if p.Name != input.Name { + return false + } + + if p.DataSource != input.DataSource { + return false + } + + if p.DefaultOption != input.DefaultOption { + return false + } + + if p.Cookie != input.Cookie { + return false + } + + // Compare PostActionOptions + if len(p.Options) != len(input.Options) { + return false + } + + for k := range p.Options { + if p.Options[k].Text != input.Options[k].Text { + return false + } + + if p.Options[k].Value != input.Options[k].Value { + return false + } + } + + // Compare PostActionIntegration + + // If input is nil, then return true if original is also nil. + // Else return false. + if input.Integration == nil { + return p.Integration == nil + } + + // At this point, input is not nil, so return false if original is. + if p.Integration == nil { + return false + } + + // Both are unequal and not nil. 
+ if p.Integration.URL != input.Integration.URL { + return false + } + + if len(p.Integration.Context) != len(input.Integration.Context) { + return false + } + + for key, value := range p.Integration.Context { + inputValue, ok := input.Integration.Context[key] + if !ok { + return false + } + + switch inputValue.(type) { + case string, bool, int, float64: + if value != inputValue { + return false + } + default: + if !reflect.DeepEqual(value, inputValue) { + return false + } + } + } + + return true +} + +// PostActionCookie is set by the server, serialized and encrypted into +// PostAction.Cookie. The clients should hold on to it, and include it with +// subsequent DoPostAction requests. This allows the server to access the +// action metadata even when it's not available in the database, for ephemeral +// posts. +type PostActionCookie struct { + Type string `json:"type,omitempty"` + PostId string `json:"post_id,omitempty"` + RootPostId string `json:"root_post_id,omitempty"` + ChannelId string `json:"channel_id,omitempty"` + DataSource string `json:"data_source,omitempty"` + Integration *PostActionIntegration `json:"integration,omitempty"` + RetainProps map[string]any `json:"retain_props,omitempty"` + RemoveProps []string `json:"remove_props,omitempty"` +} + +type PostActionOptions struct { + Text string `json:"text"` + Value string `json:"value"` +} + +func (o *PostActionOptions) IsValid() error { + var multiErr *multierror.Error + + if o.Text == "" { + multiErr = multierror.Append(multiErr, fmt.Errorf("text is required")) + } + if o.Value == "" { + multiErr = multierror.Append(multiErr, fmt.Errorf("value is required")) + } + + return multiErr.ErrorOrNil() +} + +type PostActionIntegration struct { + // URL is the endpoint that the action will be sent to. + // It can be a relative path to a plugin. 
+ URL string `json:"url,omitempty"` + Context map[string]any `json:"context,omitempty"` +} + +type PostActionIntegrationRequest struct { + UserId string `json:"user_id"` + UserName string `json:"user_name"` + ChannelId string `json:"channel_id"` + ChannelName string `json:"channel_name"` + TeamId string `json:"team_id"` + TeamName string `json:"team_domain"` + PostId string `json:"post_id"` + TriggerId string `json:"trigger_id"` + Type string `json:"type"` + DataSource string `json:"data_source"` + Context map[string]any `json:"context,omitempty"` +} + +type PostActionIntegrationResponse struct { + Update *Post `json:"update"` + EphemeralText string `json:"ephemeral_text"` + SkipSlackParsing bool `json:"skip_slack_parsing"` // Set to `true` to skip the Slack-compatibility handling of Text. +} + +type PostActionAPIResponse struct { + Status string `json:"status"` // needed to maintain backwards compatibility + TriggerId string `json:"trigger_id"` +} + +type Dialog struct { + CallbackId string `json:"callback_id"` + Title string `json:"title"` + IntroductionText string `json:"introduction_text"` + IconURL string `json:"icon_url"` + Elements []DialogElement `json:"elements"` + SubmitLabel string `json:"submit_label"` + NotifyOnCancel bool `json:"notify_on_cancel"` + State string `json:"state"` + SourceURL string `json:"source_url,omitempty"` +} + +type DialogElement struct { + DisplayName string `json:"display_name"` + Name string `json:"name"` + Type string `json:"type"` + SubType string `json:"subtype"` + Default string `json:"default"` + Placeholder string `json:"placeholder"` + HelpText string `json:"help_text"` + Optional bool `json:"optional"` + MinLength int `json:"min_length"` + MaxLength int `json:"max_length"` + DataSource string `json:"data_source"` + DataSourceURL string `json:"data_source_url,omitempty"` + Options []*PostActionOptions `json:"options"` + MultiSelect bool `json:"multiselect"` + Refresh bool `json:"refresh,omitempty"` + // Date/datetime 
field specific properties + MinDate string `json:"min_date,omitempty"` + MaxDate string `json:"max_date,omitempty"` + TimeInterval int `json:"time_interval,omitempty"` +} + +type OpenDialogRequest struct { + TriggerId string `json:"trigger_id"` + URL string `json:"url"` + Dialog Dialog `json:"dialog"` +} + +type SubmitDialogRequest struct { + Type string `json:"type"` + URL string `json:"url,omitempty"` + CallbackId string `json:"callback_id"` + State string `json:"state"` + UserId string `json:"user_id"` + ChannelId string `json:"channel_id"` + TeamId string `json:"team_id"` + Submission map[string]any `json:"submission"` + Cancelled bool `json:"cancelled"` +} + +type SubmitDialogResponseType string + +const ( + SubmitDialogResponseTypeEmpty SubmitDialogResponseType = "" + SubmitDialogResponseTypeOK SubmitDialogResponseType = "ok" + SubmitDialogResponseTypeForm SubmitDialogResponseType = "form" + SubmitDialogResponseTypeNavigate SubmitDialogResponseType = "navigate" +) + +type SubmitDialogResponse struct { + Error string `json:"error,omitempty"` + Errors map[string]string `json:"errors,omitempty"` + Type string `json:"type,omitempty"` + Form *Dialog `json:"form,omitempty"` +} + +func (r *SubmitDialogResponse) IsValid() error { + // If Error or Errors are set, this is valid and everything else is ignored + if r.Error != "" || len(r.Errors) > 0 { + return nil + } + + // Validate Type field and handle Form field appropriately for each type + switch SubmitDialogResponseType(r.Type) { + case SubmitDialogResponseTypeEmpty, SubmitDialogResponseTypeOK, SubmitDialogResponseTypeNavigate: + // Completion types - Form field should be nil + if r.Form != nil { + return errors.Errorf("form field must be nil for type %q", r.Type) + } + case SubmitDialogResponseTypeForm: + // Continuation type - Form field is required and must be valid + if r.Form == nil { + return errors.New("form field is required for form type") + } + if err := r.Form.IsValid(); err != nil { + return 
errors.Wrap(err, "invalid form") + } + default: + return errors.Errorf("invalid type %q, must be one of: empty, ok, form, navigate", r.Type) + } + + return nil +} + +// DialogSelectOption represents an option in a select dropdown for dialogs +type DialogSelectOption struct { + Text string `json:"text"` + Value string `json:"value"` +} + +// LookupDialogResponse represents the response for a lookup dialog request. +type LookupDialogResponse struct { + Items []DialogSelectOption `json:"items"` +} + +// signForGenerateTriggerId wraps the signing operation with panic recovery +// to handle invalid signers that may cause panics in the crypto package +func signForGenerateTriggerId(s crypto.Signer, digest []byte, opts crypto.SignerOpts) (signature []byte, err error) { + defer func() { + if r := recover(); r != nil { + signature = nil + err = fmt.Errorf("invalid signing key: %v", r) + } + }() + + return s.Sign(rand.Reader, digest, opts) +} + +func GenerateTriggerId(userId string, s crypto.Signer) (string, string, *AppError) { + clientTriggerId := NewId() + triggerData := strings.Join([]string{clientTriggerId, userId, strconv.FormatInt(GetMillis(), 10)}, ":") + ":" + + h := crypto.SHA256 + sum := h.New() + sum.Write([]byte(triggerData)) + signature, err := signForGenerateTriggerId(s, sum.Sum(nil), h) + if err != nil { + return "", "", NewAppError("GenerateTriggerId", "interactive_message.generate_trigger_id.signing_failed", nil, "", http.StatusInternalServerError).Wrap(err) + } + + base64Sig := base64.StdEncoding.EncodeToString(signature) + + triggerId := base64.StdEncoding.EncodeToString([]byte(triggerData + base64Sig)) + return clientTriggerId, triggerId, nil +} + +func (r *PostActionIntegrationRequest) GenerateTriggerId(s crypto.Signer) (string, string, *AppError) { + clientTriggerId, triggerId, appErr := GenerateTriggerId(r.UserId, s) + if appErr != nil { + return "", "", appErr + } + + r.TriggerId = triggerId + return clientTriggerId, triggerId, nil +} + +func 
DecodeAndVerifyTriggerId(triggerId string, s *ecdsa.PrivateKey, timeout time.Duration) (string, string, *AppError) { + triggerIdBytes, err := base64.StdEncoding.DecodeString(triggerId) + if err != nil { + return "", "", NewAppError("DecodeAndVerifyTriggerId", "interactive_message.decode_trigger_id.base64_decode_failed", nil, "", http.StatusBadRequest).Wrap(err) + } + + split := strings.Split(string(triggerIdBytes), ":") + if len(split) != 4 { + return "", "", NewAppError("DecodeAndVerifyTriggerId", "interactive_message.decode_trigger_id.missing_data", nil, "", http.StatusBadRequest) + } + + clientTriggerId := split[0] + userId := split[1] + timestampStr := split[2] + timestamp, _ := strconv.ParseInt(timestampStr, 10, 64) + + if time.Since(time.UnixMilli(timestamp)) > timeout { + return "", "", NewAppError("DecodeAndVerifyTriggerId", "interactive_message.decode_trigger_id.expired", map[string]any{"Duration": timeout.String()}, "", http.StatusBadRequest) + } + + signature, err := base64.StdEncoding.DecodeString(split[3]) + if err != nil { + return "", "", NewAppError("DecodeAndVerifyTriggerId", "interactive_message.decode_trigger_id.base64_decode_failed_signature", nil, "", http.StatusBadRequest).Wrap(err) + } + + var esig struct { + R, S *big.Int + } + + if _, err := asn1.Unmarshal(signature, &esig); err != nil { + return "", "", NewAppError("DecodeAndVerifyTriggerId", "interactive_message.decode_trigger_id.signature_decode_failed", nil, "", http.StatusBadRequest).Wrap(err) + } + + triggerData := strings.Join([]string{clientTriggerId, userId, timestampStr}, ":") + ":" + + h := crypto.SHA256 + sum := h.New() + sum.Write([]byte(triggerData)) + + if !ecdsa.Verify(&s.PublicKey, sum.Sum(nil), esig.R, esig.S) { + return "", "", NewAppError("DecodeAndVerifyTriggerId", "interactive_message.decode_trigger_id.verify_signature_failed", nil, "", http.StatusBadRequest) + } + + return clientTriggerId, userId, nil +} + +func (r *OpenDialogRequest) DecodeAndVerifyTriggerId(s 
*ecdsa.PrivateKey, timeout time.Duration) (string, string, *AppError) { + return DecodeAndVerifyTriggerId(r.TriggerId, s, timeout) +} + +func (r *OpenDialogRequest) IsValid() error { + var multiErr *multierror.Error + if r.URL == "" { + multiErr = multierror.Append(multiErr, errors.New("empty URL")) + } + + if r.TriggerId == "" { + multiErr = multierror.Append(multiErr, errors.New("empty trigger id")) + } + + err := r.Dialog.IsValid() + if err != nil { + multiErr = multierror.Append(multiErr, err) + } + + return multiErr.ErrorOrNil() +} + +func (d *Dialog) IsValid() error { + var multiErr *multierror.Error + + if d.Title == "" || len(d.Title) > DialogTitleMaxLength { + multiErr = multierror.Append(multiErr, errors.Errorf("invalid dialog title %q", d.Title)) + } + + if d.IconURL != "" && !IsValidHTTPURL(d.IconURL) { + multiErr = multierror.Append(multiErr, errors.New("invalid icon url")) + } + + if len(d.Elements) != 0 { + elementMap := make(map[string]bool) + + for _, element := range d.Elements { + if elementMap[element.Name] { + multiErr = multierror.Append(multiErr, errors.Errorf("duplicate dialog element %q", element.Name)) + } + elementMap[element.Name] = true + + err := element.IsValid() + if err != nil { + multiErr = multierror.Append(multiErr, errors.Wrapf(err, "%q field is not valid", element.Name)) + } + } + } + return multiErr.ErrorOrNil() +} + +func (e *DialogElement) IsValid() error { + var multiErr *multierror.Error + textSubTypes := map[string]bool{ + "": true, + "text": true, + "email": true, + "number": true, + "tel": true, + "url": true, + "password": true, + } + + if e.MinLength < 0 { + multiErr = multierror.Append(multiErr, errors.Errorf("min length cannot be a negative number, got %d", e.MinLength)) + } + if e.MinLength > e.MaxLength { + multiErr = multierror.Append(multiErr, errors.Errorf("min length should be less then max length, got %d > %d", e.MinLength, e.MaxLength)) + } + + multiErr = multierror.Append(multiErr, 
checkMaxLength("DisplayName", e.DisplayName, DialogElementDisplayNameMaxLength)) + multiErr = multierror.Append(multiErr, checkMaxLength("Name", e.Name, DialogElementNameMaxLength)) + multiErr = multierror.Append(multiErr, checkMaxLength("HelpText", e.HelpText, DialogElementHelpTextMaxLength)) + + if e.MultiSelect && e.Type != "select" { + multiErr = multierror.Append(multiErr, errors.Errorf("multiselect can only be used with select elements, got type %q", e.Type)) + } + + switch e.Type { + case "text": + multiErr = multierror.Append(multiErr, checkMaxLength("Default", e.Default, DialogElementTextMaxLength)) + multiErr = multierror.Append(multiErr, checkMaxLength("Placeholder", e.Placeholder, DialogElementTextMaxLength)) + if _, ok := textSubTypes[e.SubType]; !ok { + multiErr = multierror.Append(multiErr, errors.Errorf("invalid subtype %q", e.Type)) + } + + case "textarea": + multiErr = multierror.Append(multiErr, checkMaxLength("Default", e.Default, DialogElementTextareaMaxLength)) + multiErr = multierror.Append(multiErr, checkMaxLength("Placeholder", e.Placeholder, DialogElementTextareaMaxLength)) + + if _, ok := textSubTypes[e.SubType]; !ok { + multiErr = multierror.Append(multiErr, errors.Errorf("invalid subtype %q", e.Type)) + } + + case "select": + multiErr = multierror.Append(multiErr, checkMaxLength("Default", e.Default, DialogElementSelectMaxLength)) + multiErr = multierror.Append(multiErr, checkMaxLength("Placeholder", e.Placeholder, DialogElementSelectMaxLength)) + if e.DataSource != "" && e.DataSource != "users" && e.DataSource != "channels" && e.DataSource != "dynamic" { + multiErr = multierror.Append(multiErr, errors.Errorf("invalid data source %q, allowed are 'users', 'channels', or 'dynamic'", e.DataSource)) + } + if e.DataSource == "dynamic" { + // Dynamic selects should have a data_source_url + if e.DataSourceURL == "" { + multiErr = multierror.Append(multiErr, errors.New("dynamic data_source requires data_source_url")) + } else if 
!IsValidLookupURL(e.DataSourceURL) { + multiErr = multierror.Append(multiErr, errors.New("invalid data_source_url for dynamic select")) + } + // Dynamic selects should not have static options + if len(e.Options) > 0 { + multiErr = multierror.Append(multiErr, errors.New("dynamic select element should not have static options")) + } + } else if e.DataSource == "" { + if e.MultiSelect { + if !isMultiSelectDefaultInOptions(e.Default, e.Options) { + multiErr = multierror.Append(multiErr, errors.Errorf("multiselect default value %q contains values not in options", e.Default)) + } + } else if !isDefaultInOptions(e.Default, e.Options) { + multiErr = multierror.Append(multiErr, errors.Errorf("default value %q doesn't exist in options ", e.Default)) + } + } + + case "bool": + if e.Default != "" && e.Default != "true" && e.Default != "false" { + multiErr = multierror.Append(multiErr, errors.New("invalid default of bool")) + } + multiErr = multierror.Append(multiErr, checkMaxLength("Placeholder", e.Placeholder, DialogElementBoolMaxLength)) + + case "radio": + if !isDefaultInOptions(e.Default, e.Options) { + multiErr = multierror.Append(multiErr, errors.Errorf("default value %q doesn't exist in options ", e.Default)) + } + + case "date": + multiErr = multierror.Append(multiErr, checkMaxLength("Default", e.Default, DialogElementTextMaxLength)) + multiErr = multierror.Append(multiErr, checkMaxLength("Placeholder", e.Placeholder, DialogElementTextMaxLength)) + multiErr = multierror.Append(multiErr, validateDateFormat(e.Default)) + multiErr = multierror.Append(multiErr, validateDateFormat(e.MinDate)) + multiErr = multierror.Append(multiErr, validateDateFormat(e.MaxDate)) + + case "datetime": + multiErr = multierror.Append(multiErr, checkMaxLength("Default", e.Default, DialogElementTextMaxLength)) + multiErr = multierror.Append(multiErr, checkMaxLength("Placeholder", e.Placeholder, DialogElementTextMaxLength)) + multiErr = multierror.Append(multiErr, 
validateDateTimeFormat(e.Default)) + multiErr = multierror.Append(multiErr, validateDateFormat(e.MinDate)) + multiErr = multierror.Append(multiErr, validateDateFormat(e.MaxDate)) + // Validate time_interval for datetime fields + timeInterval := e.TimeInterval + if timeInterval == 0 { + multiErr = multierror.Append(multiErr, errors.Errorf("time_interval of 0 will be reset to default, %d minutes", DefaultTimeIntervalMinutes)) + } else if timeInterval < 1 || timeInterval > 1440 { + multiErr = multierror.Append(multiErr, errors.Errorf("time_interval must be between 1 and 1440 minutes, got %d", timeInterval)) + } else if 1440%timeInterval != 0 { + multiErr = multierror.Append(multiErr, errors.Errorf("time_interval must be a divisor of 1440 (24 hours * 60 minutes) to create valid time intervals, got %d", timeInterval)) + } + + default: + multiErr = multierror.Append(multiErr, errors.Errorf("invalid element type: %q", e.Type)) + } + + return multiErr.ErrorOrNil() +} + +func isDefaultInOptions(defaultValue string, options []*PostActionOptions) bool { + if defaultValue == "" { + return true + } + + for _, option := range options { + if option != nil && defaultValue == option.Value { + return true + } + } + + return false +} + +func isMultiSelectDefaultInOptions(defaultValue string, options []*PostActionOptions) bool { + if defaultValue == "" { + return true + } + + for value := range strings.SplitSeq(strings.ReplaceAll(defaultValue, " ", ""), ",") { + if value == "" { + continue + } + found := false + for _, option := range options { + if option != nil && value == option.Value { + found = true + break + } + } + if !found { + return false + } + } + + return true +} + +// validateRelativePattern validates relative date patterns like +1d, +2w, +1m +func validateRelativePattern(value string) bool { + if len(value) < 3 || len(value) > 5 || (value[0] != '+' && value[0] != '-') { + return false + } + + lastChar := strings.ToLower(string(value[len(value)-1])) + if 
!strings.Contains("dwm", lastChar) { + return false + } + + numberPart := value[1 : len(value)-1] + _, err := strconv.Atoi(numberPart) + return err == nil +} + +// isValidRelativeFormat checks if a string matches relative date patterns +func isValidRelativeFormat(value string) bool { + relativeFormats := []string{"today", "tomorrow", "yesterday"} + return slices.Contains(relativeFormats, value) || validateRelativePattern(value) +} + +// validateDateFormat validates date strings: ISO date, datetime (with warning), or relative formats +func validateDateFormat(dateStr string) error { + if dateStr == "" { + return nil + } + + if isValidRelativeFormat(dateStr) { + return nil + } + if _, err := time.Parse(ISODateFormat, dateStr); err == nil { + return nil + } + + for _, format := range commonDateTimeFormats { + if parsedTime, err := time.Parse(format, dateStr); err == nil { + dateOnly := parsedTime.Format(ISODateFormat) + return fmt.Errorf("date field received datetime format %q, only date portion %q will be used. 
Consider using date format instead", dateStr, dateOnly) + } + } + + return fmt.Errorf("invalid date format: %q, expected ISO format (YYYY-MM-DD), datetime format, or relative format", dateStr) +} + +// validateDateTimeFormat validates datetime strings: ISO datetime or relative formats +func validateDateTimeFormat(dateTimeStr string) error { + if dateTimeStr == "" || isValidRelativeFormat(dateTimeStr) { + return nil + } + + for _, format := range commonDateTimeFormats { + if _, err := time.Parse(format, dateTimeStr); err == nil { + return nil + } + } + + return fmt.Errorf("invalid datetime format: %q, expected ISO format (YYYY-MM-DDTHH:MM:SSZ) or relative format", dateTimeStr) +} + +func checkMaxLength(fieldName string, field string, maxLength int) error { + // DisplayName and Name are required fields + if fieldName == "DisplayName" || fieldName == "Name" { + if len(field) == 0 { + return errors.Errorf("%v cannot be empty", fieldName) + } + } + + if len(field) > maxLength { + return errors.Errorf("%v cannot be longer than %d characters, got %d", fieldName, maxLength, len(field)) + } + + return nil +} + +func (o *Post) StripActionIntegrations() { + attachments := o.Attachments() + if o.GetProp(PostPropsAttachments) != nil { + o.AddProp(PostPropsAttachments, attachments) + } + for _, attachment := range attachments { + for _, action := range attachment.Actions { + action.Integration = nil + } + } +} + +func (o *Post) GetAction(id string) *PostAction { + for _, attachment := range o.Attachments() { + for _, action := range attachment.Actions { + if action != nil && action.Id == id { + return action + } + } + } + return nil +} + +func (o *Post) GenerateActionIds() { + if o.GetProp(PostPropsAttachments) != nil { + o.AddProp(PostPropsAttachments, o.Attachments()) + } + if attachments, ok := o.GetProp(PostPropsAttachments).([]*SlackAttachment); ok { + for _, attachment := range attachments { + for _, action := range attachment.Actions { + if action != nil && action.Id == 
"" { + action.Id = NewId() + } + } + } + } +} + +func AddPostActionCookies(o *Post, secret []byte) *Post { + p := o.Clone() + + // retainedProps carry over their value from the old post, including no value + retainProps := map[string]any{} + removeProps := []string{} + for _, key := range PostActionRetainPropKeys { + value, ok := p.GetProps()[key] + if ok { + retainProps[key] = value + } else { + removeProps = append(removeProps, key) + } + } + + attachments := p.Attachments() + for _, attachment := range attachments { + for _, action := range attachment.Actions { + c := &PostActionCookie{ + Type: action.Type, + ChannelId: p.ChannelId, + DataSource: action.DataSource, + Integration: action.Integration, + RetainProps: retainProps, + RemoveProps: removeProps, + } + + c.PostId = p.Id + if p.RootId == "" { + c.RootPostId = p.Id + } else { + c.RootPostId = p.RootId + } + + b, _ := json.Marshal(c) + action.Cookie, _ = encryptPostActionCookie(string(b), secret) + } + } + + return p +} + +func encryptPostActionCookie(plain string, secret []byte) (string, error) { + if len(secret) == 0 { + return plain, nil + } + + block, err := aes.NewCipher(secret) + if err != nil { + return "", err + } + + aesgcm, err := cipher.NewGCM(block) + if err != nil { + return "", err + } + + nonce := make([]byte, aesgcm.NonceSize()) + _, err = io.ReadFull(rand.Reader, nonce) + if err != nil { + return "", err + } + + sealed := aesgcm.Seal(nil, nonce, []byte(plain), nil) + + combined := append(nonce, sealed...) 
//nolint:makezero + encoded := make([]byte, base64.StdEncoding.EncodedLen(len(combined))) + base64.StdEncoding.Encode(encoded, combined) + + return string(encoded), nil +} + +func DecryptPostActionCookie(encoded string, secret []byte) (string, error) { + if len(secret) == 0 { + return encoded, nil + } + + block, err := aes.NewCipher(secret) + if err != nil { + return "", err + } + + aesgcm, err := cipher.NewGCM(block) + if err != nil { + return "", err + } + + decoded := make([]byte, base64.StdEncoding.DecodedLen(len(encoded))) + n, err := base64.StdEncoding.Decode(decoded, []byte(encoded)) + if err != nil { + return "", err + } + decoded = decoded[:n] + + nonceSize := aesgcm.NonceSize() + if len(decoded) < nonceSize { + return "", fmt.Errorf("cookie too short") + } + + nonce, decoded := decoded[:nonceSize], decoded[nonceSize:] + plain, err := aesgcm.Open(nil, nonce, decoded, nil) + if err != nil { + return "", err + } + + return string(plain), nil +} + +// IsValidLookupURL validates if a URL is safe for lookup operations +func IsValidLookupURL(url string) bool { + if url == "" { + return false + } + + // Allow plugin paths that start with /plugins/ + if strings.HasPrefix(url, "/plugins/") { + // Additional validation for plugin paths - ensure no path traversal + if strings.Contains(url, "..") || strings.Contains(url, "//") { + return false + } + return true + } + + // For external URLs, use the same basic validation as other models + return IsValidHTTPURL(url) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/integrity.go b/vendor/github.com/mattermost/mattermost/server/public/model/integrity.go new file mode 100644 index 00000000..7cc3d4e5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/integrity.go @@ -0,0 +1,58 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "encoding/json" + "errors" +) + +type OrphanedRecord struct { + ParentId *string `json:"parent_id"` + ChildId *string `json:"child_id"` +} + +type RelationalIntegrityCheckData struct { + ParentName string `json:"parent_name"` + ChildName string `json:"child_name"` + ParentIdAttr string `json:"parent_id_attr"` + ChildIdAttr string `json:"child_id_attr"` + Records []OrphanedRecord `json:"records"` +} + +type IntegrityCheckResult struct { + Data any `json:"data"` + Err error `json:"err"` +} + +func (r *IntegrityCheckResult) UnmarshalJSON(b []byte) error { + var data map[string]any + if err := json.Unmarshal(b, &data); err != nil { + return err + } + if d, ok := data["data"]; ok && d != nil { + var rdata RelationalIntegrityCheckData + m := d.(map[string]any) + rdata.ParentName = m["parent_name"].(string) + rdata.ChildName = m["child_name"].(string) + rdata.ParentIdAttr = m["parent_id_attr"].(string) + rdata.ChildIdAttr = m["child_id_attr"].(string) + for _, recData := range m["records"].([]any) { + var record OrphanedRecord + m := recData.(map[string]any) + if val := m["parent_id"]; val != nil { + record.ParentId = NewPointer(val.(string)) + } + if val := m["child_id"]; val != nil { + record.ChildId = NewPointer(val.(string)) + } + rdata.Records = append(rdata.Records, record) + } + r.Data = rdata + } + if err, ok := data["err"]; ok && err != nil { + r.Err = errors.New(data["err"].(string)) + } + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/ip_filtering.go b/vendor/github.com/mattermost/mattermost/server/public/model/ip_filtering.go new file mode 100644 index 00000000..6523af76 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/ip_filtering.go @@ -0,0 +1,20 @@ +package model + +type AllowedIPRanges []AllowedIPRange + +type AllowedIPRange struct { + CIDRBlock string `json:"cidr_block"` + Description string `json:"description"` + Enabled bool `json:"enabled"` + OwnerID 
string `json:"owner_id"` +} + +func (air *AllowedIPRanges) Auditable() map[string]any { + return map[string]any{ + "AllowedIPRanges": air, + } +} + +type GetIPAddressResponse struct { + IP string `json:"ip"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/job.go b/vendor/github.com/mattermost/mattermost/server/public/model/job.go new file mode 100644 index 00000000..a17d6ae5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/job.go @@ -0,0 +1,244 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" + + "github.com/mattermost/mattermost/server/public/utils/timeutils" +) + +const ( + JobTypeDataRetention = "data_retention" + JobTypeMessageExport = "message_export" + JobTypeCLIMessageExport = "cli_message_export" + JobTypeElasticsearchPostIndexing = "elasticsearch_post_indexing" + JobTypeElasticsearchPostAggregation = "elasticsearch_post_aggregation" + JobTypeLdapSync = "ldap_sync" + JobTypeMigrations = "migrations" + JobTypePlugins = "plugins" + JobTypeExpiryNotify = "expiry_notify" + JobTypeProductNotices = "product_notices" + JobTypeActiveUsers = "active_users" + JobTypeImportProcess = "import_process" + JobTypeImportDelete = "import_delete" + JobTypeExportProcess = "export_process" + JobTypeExportDelete = "export_delete" + JobTypeCloud = "cloud" + JobTypeResendInvitationEmail = "resend_invitation_email" + JobTypeExtractContent = "extract_content" + JobTypeLastAccessiblePost = "last_accessible_post" + JobTypeLastAccessibleFile = "last_accessible_file" + JobTypeUpgradeNotifyAdmin = "upgrade_notify_admin" + JobTypeTrialNotifyAdmin = "trial_notify_admin" + JobTypePostPersistentNotifications = "post_persistent_notifications" + JobTypeInstallPluginNotifyAdmin = "install_plugin_notify_admin" + JobTypeHostedPurchaseScreening = "hosted_purchase_screening" + JobTypeS3PathMigration = "s3_path_migration" + 
JobTypeCleanupDesktopTokens = "cleanup_desktop_tokens" + JobTypeDeleteEmptyDraftsMigration = "delete_empty_drafts_migration" + JobTypeRefreshMaterializedViews = "refresh_materialized_views" + JobTypeDeleteOrphanDraftsMigration = "delete_orphan_drafts_migration" + JobTypeExportUsersToCSV = "export_users_to_csv" + JobTypeDeleteDmsPreferencesMigration = "delete_dms_preferences_migration" + JobTypeMobileSessionMetadata = "mobile_session_metadata" + JobTypeAccessControlSync = "access_control_sync" + JobTypePushProxyAuth = "push_proxy_auth" + + JobStatusPending = "pending" + JobStatusInProgress = "in_progress" + JobStatusSuccess = "success" + JobStatusError = "error" + JobStatusCancelRequested = "cancel_requested" + JobStatusCanceled = "canceled" + JobStatusWarning = "warning" +) + +var AllJobTypes = [...]string{ + JobTypeDataRetention, + JobTypeMessageExport, + JobTypeElasticsearchPostIndexing, + JobTypeElasticsearchPostAggregation, + JobTypeLdapSync, + JobTypeMigrations, + JobTypePlugins, + JobTypeExpiryNotify, + JobTypeProductNotices, + JobTypeActiveUsers, + JobTypeImportProcess, + JobTypeImportDelete, + JobTypeExportProcess, + JobTypeExportDelete, + JobTypeCloud, + JobTypeExtractContent, + JobTypeLastAccessiblePost, + JobTypeLastAccessibleFile, + JobTypeCleanupDesktopTokens, + JobTypeRefreshMaterializedViews, + JobTypeMobileSessionMetadata, +} + +type Job struct { + Id string `json:"id"` + Type string `json:"type"` + Priority int64 `json:"priority"` + CreateAt int64 `json:"create_at"` + StartAt int64 `json:"start_at"` + LastActivityAt int64 `json:"last_activity_at"` + Status string `json:"status"` + Progress int64 `json:"progress"` + Data StringMap `json:"data"` +} + +func (j *Job) Auditable() map[string]any { + return map[string]any{ + "id": j.Id, + "type": j.Type, + "priority": j.Priority, + "create_at": j.CreateAt, + "start_at": j.StartAt, + "last_activity_at": j.LastActivityAt, + "status": j.Status, + "progress": j.Progress, + "data": j.Data, // TODO do we want 
this here + } +} + +func (j *Job) MarshalYAML() (any, error) { + return struct { + Id string `yaml:"id"` + Type string `yaml:"type"` + Priority int64 `yaml:"priority"` + CreateAt string `yaml:"create_at"` + StartAt string `yaml:"start_at"` + LastActivityAt string `yaml:"last_activity_at"` + Status string `yaml:"status"` + Progress int64 `yaml:"progress"` + Data StringMap `yaml:"data"` + }{ + Id: j.Id, + Type: j.Type, + Priority: j.Priority, + CreateAt: timeutils.FormatMillis(j.CreateAt), + StartAt: timeutils.FormatMillis(j.StartAt), + LastActivityAt: timeutils.FormatMillis(j.LastActivityAt), + Status: j.Status, + Progress: j.Progress, + Data: j.Data, + }, nil +} + +func (j *Job) UnmarshalYAML(unmarshal func(any) error) error { + out := struct { + Id string `yaml:"id"` + Type string `yaml:"type"` + Priority int64 `yaml:"priority"` + CreateAt string `yaml:"create_at"` + StartAt string `yaml:"start_at"` + LastActivityAt string `yaml:"last_activity_at"` + Status string `yaml:"status"` + Progress int64 `yaml:"progress"` + Data StringMap `yaml:"data"` + }{} + + err := unmarshal(&out) + if err != nil { + return err + } + + createAt, err := timeutils.ParseFormatedMillis(out.CreateAt) + if err != nil { + return err + } + updateAt, err := timeutils.ParseFormatedMillis(out.StartAt) + if err != nil { + return err + } + deleteAt, err := timeutils.ParseFormatedMillis(out.LastActivityAt) + if err != nil { + return err + } + + *j = Job{ + Id: out.Id, + Type: out.Type, + Priority: out.Priority, + CreateAt: createAt, + StartAt: updateAt, + LastActivityAt: deleteAt, + Status: out.Status, + Progress: out.Progress, + Data: out.Data, + } + return nil +} + +func (j *Job) IsValid() *AppError { + if !IsValidId(j.Id) { + return NewAppError("Job.IsValid", "model.job.is_valid.id.app_error", nil, "id="+j.Id, http.StatusBadRequest) + } + + if j.CreateAt == 0 { + return NewAppError("Job.IsValid", "model.job.is_valid.create_at.app_error", nil, "id="+j.Id, http.StatusBadRequest) + } + + 
validStatus := IsValidJobStatus(j.Status) + if !validStatus { + return NewAppError("Job.IsValid", "model.job.is_valid.status.app_error", nil, "id="+j.Id, http.StatusBadRequest) + } + + return nil +} + +func (j *Job) IsValidStatusChange(newStatus string) bool { + currentStatus := j.Status + + switch currentStatus { + case JobStatusInProgress: + return newStatus == JobStatusPending || newStatus == JobStatusCancelRequested + case JobStatusPending: + return newStatus == JobStatusCancelRequested + case JobStatusCancelRequested: + return newStatus == JobStatusCanceled + } + + return false +} + +func IsValidJobStatus(status string) bool { + switch status { + case JobStatusPending, + JobStatusInProgress, + JobStatusSuccess, + JobStatusError, + JobStatusWarning, + JobStatusCancelRequested, + JobStatusCanceled: + default: + return false + } + + return true +} + +func IsValidJobType(jobType string) bool { + for _, t := range AllJobTypes { + if t == jobType { + return true + } + } + + return false +} + +func (j *Job) LogClone() any { + return j.Auditable() +} + +type Worker interface { + Run() + Stop() + JobChannel() chan<- Job + IsEnabled(cfg *Config) bool +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/ldap.go b/vendor/github.com/mattermost/mattermost/server/public/model/ldap.go new file mode 100644 index 00000000..e4b0855f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/ldap.go @@ -0,0 +1,51 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +const ( + UserAuthServiceLdap = "ldap" + LdapPublicCertificateName = "ldap-public.crt" + LdapPrivateKeyName = "ldap-private.key" +) + +// LdapDiagnosticTestType represents the type of LDAP diagnostic test to run +type LdapDiagnosticTestType string + +const ( + LdapDiagnosticTestTypeFilters LdapDiagnosticTestType = "filters" + LdapDiagnosticTestTypeAttributes LdapDiagnosticTestType = "attributes" + LdapDiagnosticTestTypeGroupAttributes LdapDiagnosticTestType = "group_attributes" +) + +// IsValid checks if the LdapDiagnosticTestType is valid +func (t LdapDiagnosticTestType) IsValid() bool { + switch t { + case LdapDiagnosticTestTypeFilters, LdapDiagnosticTestTypeAttributes, LdapDiagnosticTestTypeGroupAttributes: + return true + default: + return false + } +} + +// For Diagnostic results +type LdapDiagnosticResult struct { + TestName string `json:"test_name"` + TestValue string `json:"test_value"` + TotalCount int `json:"total_count"` + EntriesWithValue int `json:"entries_with_value"` // For Attributes + Message string `json:"message,omitempty"` + Error string `json:"error"` + SampleResults []LdapSampleEntry `json:"sample_results"` +} + +type LdapSampleEntry struct { + DN string `json:"dn"` + Username string `json:"username,omitempty"` + Email string `json:"email,omitempty"` + FirstName string `json:"first_name,omitempty"` + LastName string `json:"last_name,omitempty"` + ID string `json:"id,omitempty"` + DisplayName string `json:"display_name,omitempty"` // For groups + AvailableAttributes map[string]string `json:"available_attributes,omitempty"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/license.go b/vendor/github.com/mattermost/mattermost/server/public/model/license.go new file mode 100644 index 00000000..ec71b197 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/license.go @@ -0,0 +1,509 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. 
+// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "fmt" + "net/http" + "time" +) + +const ( + DayInSeconds = 24 * 60 * 60 + DayInMilliseconds = DayInSeconds * 1000 + + ExpiredLicenseError = "api.license.add_license.expired.app_error" + InvalidLicenseError = "api.license.add_license.invalid.app_error" + LicenseGracePeriod = DayInMilliseconds * 10 //10 days + LicenseRenewalLink = "https://mattermost.com/renew/" + + LicenseShortSkuE10 = "E10" + LicenseShortSkuE20 = "E20" + LicenseShortSkuProfessional = "professional" + LicenseShortSkuEnterprise = "enterprise" + LicenseShortSkuEnterpriseAdvanced = "advanced" + LicenseShortSkuMattermostEntry = "entry" + + ProfessionalTier = 10 + EnterpriseTier = 20 + + EntryTier = 30 + EnterpriseAdvancedTier = 30 +) + +var LicenseToLicenseTier = map[string]int{ + LicenseShortSkuProfessional: ProfessionalTier, + LicenseShortSkuEnterprise: EnterpriseTier, + LicenseShortSkuEnterpriseAdvanced: EnterpriseAdvancedTier, + LicenseShortSkuMattermostEntry: EntryTier, +} + +const ( + LicenseUpForRenewalEmailSent = "LicenseUpForRenewalEmailSent" +) + +var ( + trialDuration = 30*(time.Hour*24) + (time.Hour * 8) // 720 hours (30 days) + 8 hours is trial license duration + adminTrialDuration = 30*(time.Hour*24) + (time.Hour * 23) + (time.Minute * 59) + (time.Second * 59) // 720 hours (30 days) + 23 hours, 59 mins and 59 seconds + + // a sanctioned trial's duration is either more than the upper bound, + // or less than the lower bound + sanctionedTrialDurationLowerBound = 31*(time.Hour*24) + (time.Hour * 23) + (time.Minute * 59) + (time.Second * 59) // 744 hours (31 days) + 23 hours, 59 mins and 59 seconds + sanctionedTrialDurationUpperBound = 29*(time.Hour*24) + (time.Hour * 23) + (time.Minute * 59) + (time.Second * 59) // 696 hours (29 days) + 23 hours, 59 mins and 59 seconds +) + +type LicenseRecord struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + Bytes string `json:"-"` +} + +type 
LicenseLimits struct { + PostHistory int64 `json:"post_history"` + BoardCards int64 `json:"board_cards"` + PlaybookRuns int64 `json:"playbook_runs"` + CallDurationSeconds int64 `json:"call_duration"` + AgentsPrompts int64 `json:"agents_prompts"` + PushNotifications int64 `json:"push_notifications"` +} + +type License struct { + Id string `json:"id"` + IssuedAt int64 `json:"issued_at"` + StartsAt int64 `json:"starts_at"` + ExpiresAt int64 `json:"expires_at"` + Customer *Customer `json:"customer"` + Features *Features `json:"features"` + SkuName string `json:"sku_name"` + SkuShortName string `json:"sku_short_name"` + IsTrial bool `json:"is_trial"` + IsGovSku bool `json:"is_gov_sku"` + IsSeatCountEnforced bool `json:"is_seat_count_enforced"` + // ExtraUsers provides a grace mechanism that allows a configurable number of users + // beyond the base license limit before restricting user creation. When nil, defaults to 0. + // For example: 100 licensed users + 5 ExtraUsers = 105 total allowed users. 
+ ExtraUsers *int `json:"extra_users"` + SignupJWT *string `json:"signup_jwt"` + Limits *LicenseLimits `json:"limits"` +} + +func (l *License) IsMattermostEntry() bool { + return l != nil && l.SkuShortName == LicenseShortSkuMattermostEntry +} + +type Customer struct { + Id string `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + Company string `json:"company"` +} + +type TrialLicenseRequest struct { + ServerID string `json:"server_id"` + Email string `json:"email"` + Name string `json:"name"` + SiteURL string `json:"site_url"` + SiteName string `json:"site_name"` + Users int `json:"users"` + TermsAccepted bool `json:"terms_accepted"` + ReceiveEmailsAccepted bool `json:"receive_emails_accepted"` + ContactName string `json:"contact_name"` + ContactEmail string `json:"contact_email"` + CompanyName string `json:"company_name"` + CompanyCountry string `json:"company_country"` + CompanySize string `json:"company_size"` + ServerVersion string `json:"server_version"` +} + +// If any of the below fields are set, this is not a legacy request, and all fields should be validated +func (tlr *TrialLicenseRequest) IsLegacy() bool { + return tlr.CompanyCountry == "" && tlr.CompanyName == "" && tlr.CompanySize == "" && tlr.ContactName == "" +} + +func (tlr *TrialLicenseRequest) IsValid() bool { + if !tlr.TermsAccepted { + return false + } + + if tlr.Email == "" { + return false + } + + if tlr.Users <= 0 { + return false + } + + if tlr.CompanyCountry == "" { + return false + } + + if tlr.CompanyName == "" { + return false + } + + if tlr.CompanySize == "" { + return false + } + + if tlr.ContactName == "" { + return false + } + + return true +} + +type Features struct { + Users *int `json:"users"` + LDAP *bool `json:"ldap"` + LDAPGroups *bool `json:"ldap_groups"` + MFA *bool `json:"mfa"` + GoogleOAuth *bool `json:"google_oauth"` + Office365OAuth *bool `json:"office365_oauth"` + OpenId *bool `json:"openid"` + Compliance *bool `json:"compliance"` + Cluster *bool 
`json:"cluster"` + Metrics *bool `json:"metrics"` + MHPNS *bool `json:"mhpns"` + SAML *bool `json:"saml"` + Elasticsearch *bool `json:"elastic_search"` + Announcement *bool `json:"announcement"` + ThemeManagement *bool `json:"theme_management"` + EmailNotificationContents *bool `json:"email_notification_contents"` + DataRetention *bool `json:"data_retention"` + MessageExport *bool `json:"message_export"` + CustomPermissionsSchemes *bool `json:"custom_permissions_schemes"` + CustomTermsOfService *bool `json:"custom_terms_of_service"` + GuestAccounts *bool `json:"guest_accounts"` + GuestAccountsPermissions *bool `json:"guest_accounts_permissions"` + IDLoadedPushNotifications *bool `json:"id_loaded"` + LockTeammateNameDisplay *bool `json:"lock_teammate_name_display"` + EnterprisePlugins *bool `json:"enterprise_plugins"` + AdvancedLogging *bool `json:"advanced_logging"` + Cloud *bool `json:"cloud"` + SharedChannels *bool `json:"shared_channels"` + RemoteClusterService *bool `json:"remote_cluster_service"` + OutgoingOAuthConnections *bool `json:"outgoing_oauth_connections"` + + // after we enabled more features we'll need to control them with this + FutureFeatures *bool `json:"future_features"` +} + +func (f *Features) ToMap() map[string]any { + return map[string]any{ + "ldap": *f.LDAP, + "ldap_groups": *f.LDAPGroups, + "mfa": *f.MFA, + "google": *f.GoogleOAuth, + "office365": *f.Office365OAuth, + "openid": *f.OpenId, + "compliance": *f.Compliance, + "cluster": *f.Cluster, + "metrics": *f.Metrics, + "mhpns": *f.MHPNS, + "saml": *f.SAML, + "elastic_search": *f.Elasticsearch, + "email_notification_contents": *f.EmailNotificationContents, + "data_retention": *f.DataRetention, + "message_export": *f.MessageExport, + "custom_permissions_schemes": *f.CustomPermissionsSchemes, + "guest_accounts": *f.GuestAccounts, + "guest_accounts_permissions": *f.GuestAccountsPermissions, + "id_loaded": *f.IDLoadedPushNotifications, + "lock_teammate_name_display": *f.LockTeammateNameDisplay, 
+ "enterprise_plugins": *f.EnterprisePlugins, + "advanced_logging": *f.AdvancedLogging, + "cloud": *f.Cloud, + "shared_channels": *f.SharedChannels, + "remote_cluster_service": *f.RemoteClusterService, + "future": *f.FutureFeatures, + "outgoing_oauth_connections": *f.OutgoingOAuthConnections, + } +} + +func (f *Features) SetDefaults() { + if f.FutureFeatures == nil { + f.FutureFeatures = NewPointer(true) + } + + if f.Users == nil { + f.Users = NewPointer(0) + } + + if f.LDAP == nil { + f.LDAP = NewPointer(*f.FutureFeatures) + } + + if f.LDAPGroups == nil { + f.LDAPGroups = NewPointer(*f.FutureFeatures) + } + + if f.MFA == nil { + f.MFA = NewPointer(*f.FutureFeatures) + } + + if f.GoogleOAuth == nil { + f.GoogleOAuth = NewPointer(*f.FutureFeatures) + } + + if f.Office365OAuth == nil { + f.Office365OAuth = NewPointer(*f.FutureFeatures) + } + + if f.OpenId == nil { + f.OpenId = NewPointer(*f.FutureFeatures) + } + + if f.Compliance == nil { + f.Compliance = NewPointer(*f.FutureFeatures) + } + + if f.Cluster == nil { + f.Cluster = NewPointer(*f.FutureFeatures) + } + + if f.Metrics == nil { + f.Metrics = NewPointer(*f.FutureFeatures) + } + + if f.MHPNS == nil { + f.MHPNS = NewPointer(*f.FutureFeatures) + } + + if f.SAML == nil { + f.SAML = NewPointer(*f.FutureFeatures) + } + + if f.Elasticsearch == nil { + f.Elasticsearch = NewPointer(*f.FutureFeatures) + } + + if f.Announcement == nil { + f.Announcement = NewPointer(true) + } + + if f.ThemeManagement == nil { + f.ThemeManagement = NewPointer(true) + } + + if f.EmailNotificationContents == nil { + f.EmailNotificationContents = NewPointer(*f.FutureFeatures) + } + + if f.DataRetention == nil { + f.DataRetention = NewPointer(*f.FutureFeatures) + } + + if f.MessageExport == nil { + f.MessageExport = NewPointer(*f.FutureFeatures) + } + + if f.CustomPermissionsSchemes == nil { + f.CustomPermissionsSchemes = NewPointer(*f.FutureFeatures) + } + + if f.GuestAccounts == nil { + f.GuestAccounts = NewPointer(*f.FutureFeatures) + } + 
+ if f.GuestAccountsPermissions == nil { + f.GuestAccountsPermissions = NewPointer(*f.FutureFeatures) + } + + if f.CustomTermsOfService == nil { + f.CustomTermsOfService = NewPointer(*f.FutureFeatures) + } + + if f.IDLoadedPushNotifications == nil { + f.IDLoadedPushNotifications = NewPointer(*f.FutureFeatures) + } + + if f.LockTeammateNameDisplay == nil { + f.LockTeammateNameDisplay = NewPointer(*f.FutureFeatures) + } + + if f.EnterprisePlugins == nil { + f.EnterprisePlugins = NewPointer(*f.FutureFeatures) + } + + if f.AdvancedLogging == nil { + f.AdvancedLogging = NewPointer(*f.FutureFeatures) + } + + if f.Cloud == nil { + f.Cloud = NewPointer(false) + } + + if f.SharedChannels == nil { + f.SharedChannels = NewPointer(*f.FutureFeatures) + } + + if f.RemoteClusterService == nil { + f.RemoteClusterService = NewPointer(*f.FutureFeatures) + } + + if f.OutgoingOAuthConnections == nil { + f.OutgoingOAuthConnections = NewPointer(*f.FutureFeatures) + } +} + +func (l *License) IsExpired() bool { + return l.ExpiresAt < GetMillis() +} + +func (l *License) IsPastGracePeriod() bool { + timeDiff := GetMillis() - l.ExpiresAt + return timeDiff > LicenseGracePeriod +} + +func (l *License) IsWithinExpirationPeriod() bool { + days := l.DaysToExpiration() + return days <= 60 && days >= 58 +} + +func (l *License) DaysToExpiration() int { + dif := l.ExpiresAt - GetMillis() + d, _ := time.ParseDuration(fmt.Sprint(dif) + "ms") + days := d.Hours() / 24 + return int(days) +} + +func (l *License) IsStarted() bool { + return l.StartsAt < GetMillis() +} + +// Cloud preview is a cloud license, that is also a trial, and the difference between the start and end date is exactly 1 hour. 
+func (l *License) IsCloudPreview() bool { + return l.IsCloud() && l.IsTrialLicense() && l.ExpiresAt-l.StartsAt == 1*time.Hour.Milliseconds() +} + +func (l *License) IsCloud() bool { + return l != nil && l.Features != nil && l.Features.Cloud != nil && *l.Features.Cloud +} + +func (l *License) IsTrialLicense() bool { + return l.IsTrial || (l.ExpiresAt-l.StartsAt) == trialDuration.Milliseconds() || (l.ExpiresAt-l.StartsAt) == adminTrialDuration.Milliseconds() +} + +func (l *License) IsSanctionedTrial() bool { + duration := l.ExpiresAt - l.StartsAt + + return l.IsTrialLicense() && + (duration >= sanctionedTrialDurationLowerBound.Milliseconds() || duration <= sanctionedTrialDurationUpperBound.Milliseconds()) +} + +func (l *License) HasEnterpriseMarketplacePlugins() bool { + return *l.Features.EnterprisePlugins || + l.SkuShortName == LicenseShortSkuE20 || + MinimumProfessionalLicense(l) +} + +func (l *License) HasRemoteClusterService() bool { + if l == nil { + return false + } + + // If SharedChannels is enabled then RemoteClusterService must be enabled. + if l.HasSharedChannels() { + return true + } + + return (l.Features != nil && l.Features.RemoteClusterService != nil && *l.Features.RemoteClusterService) || + MinimumProfessionalLicense(l) +} + +func (l *License) HasSharedChannels() bool { + if l == nil { + return false + } + + return (l.Features != nil && l.Features.SharedChannels != nil && *l.Features.SharedChannels) || + MinimumProfessionalLicense(l) +} + +// NewTestLicense returns a license that expires in the future and has the given features. 
+func NewTestLicense(features ...string) *License { + ret := &License{ + ExpiresAt: GetMillis() + 90*DayInMilliseconds, + Customer: &Customer{ + Id: "some ID", + Email: "admin@example.com", + Name: "Main Contact Person", + Company: "My awesome Company", + }, + Features: &Features{}, + } + ret.Features.SetDefaults() + + featureMap := map[string]bool{} + for _, feature := range features { + featureMap[feature] = true + } + featureJson, _ := json.Marshal(featureMap) + json.Unmarshal(featureJson, &ret.Features) + + return ret +} + +// NewTestLicense returns a license that expires in the future and set as false the given features. +func NewTestLicenseWithFalseDefaults(features ...string) *License { + ret := &License{ + ExpiresAt: GetMillis() + 90*DayInMilliseconds, + Customer: &Customer{}, + Features: &Features{}, + } + ret.Features.SetDefaults() + + featureMap := map[string]bool{} + for _, feature := range features { + featureMap[feature] = false + } + featureJson, _ := json.Marshal(featureMap) + json.Unmarshal(featureJson, &ret.Features) + + return ret +} + +func NewTestLicenseSKU(skuShortName string, features ...string) *License { + lic := NewTestLicense(features...) + lic.SkuShortName = skuShortName + return lic +} + +func (lr *LicenseRecord) IsValid() *AppError { + if !IsValidId(lr.Id) { + return NewAppError("LicenseRecord.IsValid", "model.license_record.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if lr.CreateAt == 0 { + return NewAppError("LicenseRecord.IsValid", "model.license_record.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + + if lr.Bytes == "" || len(lr.Bytes) > 10000 { + return NewAppError("LicenseRecord.IsValid", "model.license_record.is_valid.bytes.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (lr *LicenseRecord) PreSave() { + lr.CreateAt = GetMillis() +} + +// MinimumProfessionalLicense returns true if the provided license is at least a professional license. 
+// Higher tier licenses also satisfy the condition. +func MinimumProfessionalLicense(license *License) bool { + return license != nil && LicenseToLicenseTier[license.SkuShortName] >= ProfessionalTier +} + +// MinimumEnterpriseLicense returns true if the provided license is at least a enterprise license. +// Higher tier licenses also satisfy the condition. +func MinimumEnterpriseLicense(license *License) bool { + return license != nil && LicenseToLicenseTier[license.SkuShortName] >= EnterpriseTier +} + +// MinimumEnterpriseAdvancedLicense returns true if the provided license is at least an Enterprise Advanced license. +func MinimumEnterpriseAdvancedLicense(license *License) bool { + return license != nil && LicenseToLicenseTier[license.SkuShortName] >= EnterpriseAdvancedTier +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/limits.go b/vendor/github.com/mattermost/mattermost/server/public/model/limits.go new file mode 100644 index 00000000..2f4818fe --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/limits.go @@ -0,0 +1,13 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type ServerLimits struct { + MaxUsersLimit int64 `json:"maxUsersLimit"` // soft limit for max number of users. + MaxUsersHardLimit int64 `json:"maxUsersHardLimit"` // hard limit for max number of active users. + ActiveUserCount int64 `json:"activeUserCount"` // actual number of active users on server. 
Active = non deleted + // Post history limit fields + PostHistoryLimit int64 `json:"postHistoryLimit"` // The actual message history limit value (0 if no limits) + LastAccessiblePostTime int64 `json:"lastAccessiblePostTime"` // Timestamp of the last accessible post (0 if no limits reached) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/link_metadata.go b/vendor/github.com/mattermost/mattermost/server/public/model/link_metadata.go new file mode 100644 index 00000000..4c53784e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/link_metadata.go @@ -0,0 +1,199 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/binary" + "encoding/json" + "fmt" + "hash/fnv" + "net/http" + "time" + "unicode/utf8" + + "github.com/dyatlov/go-opengraph/opengraph" + "github.com/dyatlov/go-opengraph/opengraph/types/image" +) + +const ( + LinkMetadataTypeImage LinkMetadataType = "image" + LinkMetadataTypeNone LinkMetadataType = "none" + LinkMetadataTypeOpengraph LinkMetadataType = "opengraph" + LinkMetadataMaxImages int = 5 + LinkMetadataMaxURLLength int = 2048 // Maximum URL length in LinkMetadata table +) + +type LinkMetadataType string + +// LinkMetadata stores arbitrary data about a link posted in a message. This includes dimensions of linked images +// and OpenGraph metadata. +type LinkMetadata struct { + // Hash is a value computed from the URL and Timestamp for use as a primary key in the database. + Hash int64 + + URL string + Timestamp int64 + Type LinkMetadataType + + // Data is the actual metadata for the link. 
It should contain data of one of the following types: + // - *model.PostImage if the linked content is an image + // - *opengraph.OpenGraph if the linked content is an HTML document + // - nil if the linked content has no metadata + Data any +} + +// truncateText ensure string is 300 chars, truncate and add ellipsis +// if it was bigger. +func truncateText(original string) string { + if utf8.RuneCountInString(original) > 300 { + return fmt.Sprintf("%.300s[...]", original) + } + return original +} + +func firstNImages(images []*image.Image, maxImages int) []*image.Image { + if maxImages < 0 { // don't break stuff, if it's weird, go for sane defaults + maxImages = LinkMetadataMaxImages + } + numImages := len(images) + if numImages > maxImages { + return images[0:maxImages] + } + return images +} + +// TruncateOpenGraph ensure OpenGraph metadata doesn't grow too big by +// shortening strings, trimming fields and reducing the number of +// images. +func TruncateOpenGraph(ogdata *opengraph.OpenGraph) *opengraph.OpenGraph { + if ogdata != nil { + empty := &opengraph.OpenGraph{} + ogdata.Title = truncateText(ogdata.Title) + ogdata.Description = truncateText(ogdata.Description) + ogdata.SiteName = truncateText(ogdata.SiteName) + ogdata.Article = empty.Article + ogdata.Book = empty.Book + ogdata.Profile = empty.Profile + ogdata.Determiner = empty.Determiner + ogdata.Locale = empty.Locale + ogdata.LocalesAlternate = empty.LocalesAlternate + ogdata.Images = firstNImages(ogdata.Images, LinkMetadataMaxImages) + ogdata.Audios = empty.Audios + ogdata.Videos = empty.Videos + } + return ogdata +} + +func (o *LinkMetadata) PreSave() { + o.Hash = GenerateLinkMetadataHash(o.URL, o.Timestamp) +} + +func (o *LinkMetadata) IsValid() *AppError { + if o.URL == "" { + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.url.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.URL) > LinkMetadataMaxURLLength { + return NewAppError("LinkMetadata.IsValid", 
"model.link_metadata.is_valid.url_length.app_error", map[string]any{"MaxLength": LinkMetadataMaxURLLength, "Length": len(o.URL)}, "", http.StatusBadRequest) + } + + if o.Timestamp == 0 || !isRoundedToNearestHour(o.Timestamp) { + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.timestamp.app_error", nil, "", http.StatusBadRequest) + } + + switch o.Type { + case LinkMetadataTypeImage: + if o.Data == nil { + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.data.app_error", nil, "", http.StatusBadRequest) + } + + if _, ok := o.Data.(*PostImage); !ok { + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.data_type.app_error", nil, "", http.StatusBadRequest) + } + case LinkMetadataTypeNone: + if o.Data != nil { + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.data_type.app_error", nil, "", http.StatusBadRequest) + } + case LinkMetadataTypeOpengraph: + if o.Data == nil { + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.data.app_error", nil, "", http.StatusBadRequest) + } + + if _, ok := o.Data.(*opengraph.OpenGraph); !ok { + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.data_type.app_error", nil, "", http.StatusBadRequest) + } + default: + return NewAppError("LinkMetadata.IsValid", "model.link_metadata.is_valid.type.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +// DeserializeDataToConcreteType converts o.Data from JSON into properly structured data. This is intended to be used +// after getting a LinkMetadata object that has been stored in the database. 
+func (o *LinkMetadata) DeserializeDataToConcreteType() error { + var b []byte + switch t := o.Data.(type) { + case []byte: + // MySQL uses a byte slice for JSON + b = t + case string: + // Postgres uses a string for JSON + b = []byte(t) + } + + if b == nil { + // Data doesn't need to be fixed + return nil + } + + var data any + var err error + + switch o.Type { + case LinkMetadataTypeImage: + image := &PostImage{} + + err = json.Unmarshal(b, &image) + + data = image + case LinkMetadataTypeOpengraph: + og := &opengraph.OpenGraph{} + + json.Unmarshal(b, &og) + + data = og + } + + if err != nil { + return err + } + + o.Data = data + + return nil +} + +// FloorToNearestHour takes a timestamp (in milliseconds) and returns it rounded to the previous hour in UTC. +func FloorToNearestHour(ms int64) int64 { + t := time.Unix(0, ms*int64(1000*1000)).UTC() + + return time.Date(t.Year(), t.Month(), t.Day(), t.Hour(), 0, 0, 0, time.UTC).UnixNano() / int64(time.Millisecond) +} + +// isRoundedToNearestHour returns true if the given timestamp (in milliseconds) has been rounded to the nearest hour in UTC. +func isRoundedToNearestHour(ms int64) bool { + return FloorToNearestHour(ms) == ms +} + +// GenerateLinkMetadataHash generates a unique hash for a given URL and timestamp for use as a database key. +func GenerateLinkMetadataHash(url string, timestamp int64) int64 { + hash := fnv.New32() + + // Note that we ignore write errors here because the Hash interface says that its Write will never return an error + binary.Write(hash, binary.LittleEndian, timestamp) + hash.Write([]byte(url)) + + return int64(hash.Sum32()) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/manifest.go b/vendor/github.com/mattermost/mattermost/server/public/model/manifest.go new file mode 100644 index 00000000..a7f76893 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/manifest.go @@ -0,0 +1,498 @@ +// Copyright (c) 2015-present Mattermost, Inc. 
All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "github.com/blang/semver/v4" + "github.com/pkg/errors" + "gopkg.in/yaml.v3" +) + +type PluginOption struct { + // The display name for the option. + DisplayName string `json:"display_name" yaml:"display_name"` + + // The string value for the option. + Value string `json:"value" yaml:"value"` +} + +type PluginSettingType int + +const ( + Bool PluginSettingType = iota + Dropdown + Generated + Radio + Text + LongText + Number + Username + Custom +) + +type PluginSetting struct { + // The key that the setting will be assigned to in the configuration file. + Key string `json:"key" yaml:"key"` + + // The display name for the setting. + DisplayName string `json:"display_name" yaml:"display_name"` + + // The type of the setting. + // + // "bool" will result in a boolean true or false setting. + // + // "dropdown" will result in a string setting that allows the user to select from a list of + // pre-defined options. + // + // "generated" will result in a string setting that is set to a random, cryptographically secure + // string. + // + // "radio" will result in a string setting that allows the user to select from a short selection + // of pre-defined options. + // + // "text" will result in a string setting that can be typed in manually. + // + // "longtext" will result in a multi line string that can be typed in manually. + // + // "number" will result in integer setting that can be typed in manually. + // + // "username" will result in a text setting that will autocomplete to a username. + // + // "custom" will result in a custom defined setting and will load the custom component registered for the Web App System Console. + Type string `json:"type" yaml:"type"` + + // The help text to display to the user. Supports Markdown formatting. 
+ HelpText string `json:"help_text" yaml:"help_text"` + + // The help text to display alongside the "Regenerate" button for settings of the "generated" type. + RegenerateHelpText string `json:"regenerate_help_text,omitempty" yaml:"regenerate_help_text,omitempty"` + + // The placeholder to display for "generated", "text", "longtext", "number" and "username" types when blank. + Placeholder string `json:"placeholder" yaml:"placeholder"` + + // The default value of the setting. + Default any `json:"default" yaml:"default"` + + // For "radio" or "dropdown" settings, this is the list of pre-defined options that the user can choose + // from. + Options []*PluginOption `json:"options,omitempty" yaml:"options,omitempty"` + + // The intended hosting environment for this plugin setting. Can be "cloud" or "on-prem". When this field is set, + // and the opposite environment is running the plugin, the setting will be hidden in the admin console UI. + // Note that this functionality is entirely client-side, so the plugin needs to handle the case of invalid submissions. + Hosting string `json:"hosting"` + + // If true, the setting is sanitized before showing it in the System Console or returning it via the API. + // This is useful for settings that contain sensitive information. + Secret bool `json:"secret"` +} + +type PluginSettingsSection struct { + // A unique identifier for this section. + Key string `json:"key" yaml:"key"` + + // Optional text to display as section title. + Title string `json:"title" yaml:"title"` + + // Optional text to display as section subtitle. + Subtitle string `json:"subtitle" yaml:"subtitle"` + + // A list of setting definitions to display inside the section. + Settings []*PluginSetting `json:"settings" yaml:"settings"` + + // Optional text to display above the settings. Supports Markdown formatting. + Header string `json:"header" yaml:"header"` + + // Optional text to display below the settings. Supports Markdown formatting. 
+ Footer string `json:"footer" yaml:"footer"` + + // If true, the section will load the custom component registered using `registry.registerAdminConsoleCustomSection` + Custom bool `json:"custom" yaml:"custom"` + + // If true and Custom = true, the settings defined under this section will still render as fallback (unless the individual setting is type 'custom') when the plugin is disabled. + Fallback bool `json:"fallback" yaml:"fallback"` +} + +type PluginSettingsSchema struct { + // Optional text to display above the settings. Supports Markdown formatting. + Header string `json:"header" yaml:"header"` + + // Optional text to display below the settings. Supports Markdown formatting. + Footer string `json:"footer" yaml:"footer"` + + // A list of setting definitions. + Settings []*PluginSetting `json:"settings" yaml:"settings"` + + // A list of settings section definitions. + Sections []*PluginSettingsSection `json:"sections" yaml:"sections"` +} + +// The plugin manifest defines the metadata required to load and present your plugin. The manifest +// file should be named plugin.json or plugin.yaml and placed in the top of your +// plugin bundle. 
+// +// Example plugin.json: +// +// { +// "id": "com.mycompany.myplugin", +// "name": "My Plugin", +// "description": "This is my plugin", +// "homepage_url": "https://example.com", +// "support_url": "https://example.com/support", +// "release_notes_url": "https://example.com/releases/v0.0.1", +// "icon_path": "assets/logo.svg", +// "version": "0.1.0", +// "min_server_version": "5.6.0", +// "server": { +// "executables": { +// "linux-amd64": "server/dist/plugin-linux-amd64", +// "darwin-amd64": "server/dist/plugin-darwin-amd64", +// "windows-amd64": "server/dist/plugin-windows-amd64.exe" +// } +// }, +// "webapp": { +// "bundle_path": "webapp/dist/main.js" +// }, +// "settings_schema": { +// "header": "Some header text", +// "footer": "Some footer text", +// "settings": [{ +// "key": "someKey", +// "display_name": "Enable Extra Feature", +// "type": "bool", +// "help_text": "When true, an extra feature will be enabled!", +// "default": "false" +// }] +// }, +// "props": { +// "someKey": "someData" +// } +// } +type Manifest struct { + // The id is a globally unique identifier that represents your plugin. Ids must be at least + // 3 characters, at most 190 characters and must match ^[a-zA-Z0-9-_\.]+$. + // Reverse-DNS notation using a name you control is a good option, e.g. "com.mycompany.myplugin". + Id string `json:"id" yaml:"id"` + + // The name to be displayed for the plugin. + Name string `json:"name" yaml:"name"` + + // A description of what your plugin is and does. + Description string `json:"description,omitempty" yaml:"description,omitempty"` + + // HomepageURL is an optional link to learn more about the plugin. + HomepageURL string `json:"homepage_url,omitempty" yaml:"homepage_url,omitempty"` + + // SupportURL is an optional URL where plugin issues can be reported. + SupportURL string `json:"support_url,omitempty" yaml:"support_url,omitempty"` + + // ReleaseNotesURL is an optional URL where a changelog for the release can be found. 
+ ReleaseNotesURL string `json:"release_notes_url,omitempty" yaml:"release_notes_url,omitempty"` + + // A relative file path in the bundle that points to the plugins svg icon for use with the Plugin Marketplace. + // This should be relative to the root of your bundle and the location of the manifest file. Bitmap image formats are not supported. + IconPath string `json:"icon_path,omitempty" yaml:"icon_path,omitempty"` + + // A version number for your plugin. Semantic versioning is recommended: http://semver.org + Version string `json:"version" yaml:"version"` + + // The minimum Mattermost server version required for your plugin. + // + // Minimum server version: 5.6 + MinServerVersion string `json:"min_server_version,omitempty" yaml:"min_server_version,omitempty"` + + // Server defines the server-side portion of your plugin. + Server *ManifestServer `json:"server,omitempty" yaml:"server,omitempty"` + + // If your plugin extends the web app, you'll need to define webapp. + Webapp *ManifestWebapp `json:"webapp,omitempty" yaml:"webapp,omitempty"` + + // To allow administrators to configure your plugin via the Mattermost system console, you can + // provide your settings schema. + SettingsSchema *PluginSettingsSchema `json:"settings_schema,omitempty" yaml:"settings_schema,omitempty"` + + // Plugins can store any kind of data in Props to allow other plugins to use it. + Props map[string]any `json:"props,omitempty" yaml:"props,omitempty"` +} + +type ManifestServer struct { + // Executables are the paths to your executable binaries, specifying multiple entry + // points for different platforms when bundled together in a single plugin. + Executables map[string]string `json:"executables,omitempty" yaml:"executables,omitempty"` + + // Executable is the path to your executable binary. This should be relative to the root + // of your bundle and the location of the manifest file. + // + // On Windows, this file must have a ".exe" extension. 
+ // + // If your plugin is compiled for multiple platforms, consider bundling them together + // and using the Executables field instead. + Executable string `json:"executable" yaml:"executable"` +} + +type ManifestWebapp struct { + // The path to your webapp bundle. This should be relative to the root of your bundle and the + // location of the manifest file. + BundlePath string `json:"bundle_path" yaml:"bundle_path"` + + // BundleHash is the 64-bit FNV-1a hash of the webapp bundle, computed when the plugin is loaded + BundleHash []byte `json:"-"` +} + +func (m *Manifest) HasClient() bool { + return m.Webapp != nil +} + +func (m *Manifest) ClientManifest() *Manifest { + cm := new(Manifest) + *cm = *m + cm.Name = "" + cm.Description = "" + cm.Server = nil + if cm.Webapp != nil { + cm.Webapp = new(ManifestWebapp) + *cm.Webapp = *m.Webapp + cm.Webapp.BundlePath = "/static/" + m.Id + "/" + fmt.Sprintf("%s_%x_bundle.js", m.Id, m.Webapp.BundleHash) + } + return cm +} + +// GetExecutableForRuntime returns the path to the executable for the given runtime architecture. +// +// If the manifest defines multiple executables, but none match, or if only a single executable +// is defined, the Executable field will be returned. This method does not guarantee that the +// resulting binary can actually execute on the given platform. 
+func (m *Manifest) GetExecutableForRuntime(goOs, goArch string) string { + server := m.Server + + if server == nil { + return "" + } + + var executable string + if len(server.Executables) > 0 { + osArch := fmt.Sprintf("%s-%s", goOs, goArch) + executable = server.Executables[osArch] + } + + if executable == "" { + executable = server.Executable + } + + return executable +} + +func (m *Manifest) HasServer() bool { + return m.Server != nil +} + +func (m *Manifest) HasWebapp() bool { + return m.Webapp != nil +} + +func (m *Manifest) MeetMinServerVersion(serverVersion string) (bool, error) { + minServerVersion, err := semver.Parse(m.MinServerVersion) + if err != nil { + return false, errors.New("failed to parse MinServerVersion") + } + sv := semver.MustParse(serverVersion) + if sv.LT(minServerVersion) { + return false, nil + } + return true, nil +} + +func (m *Manifest) IsValid() error { + if !IsValidPluginId(m.Id) { + return errors.New("invalid plugin ID") + } + + if strings.TrimSpace(m.Name) == "" { + return errors.New("a plugin name is needed") + } + + if m.HomepageURL != "" && !IsValidHTTPURL(m.HomepageURL) { + return errors.New("invalid HomepageURL") + } + + if m.SupportURL != "" && !IsValidHTTPURL(m.SupportURL) { + return errors.New("invalid SupportURL") + } + + if m.ReleaseNotesURL != "" && !IsValidHTTPURL(m.ReleaseNotesURL) { + return errors.New("invalid ReleaseNotesURL") + } + + if m.Version != "" { + _, err := semver.Parse(m.Version) + if err != nil { + return errors.Wrap(err, "failed to parse Version") + } + } + + if m.MinServerVersion != "" { + _, err := semver.Parse(m.MinServerVersion) + if err != nil { + return errors.Wrap(err, "failed to parse MinServerVersion") + } + } + + if m.SettingsSchema != nil { + err := m.SettingsSchema.isValid() + if err != nil { + return errors.Wrap(err, "invalid settings schema") + } + } + + return nil +} + +func (s *PluginSettingsSchema) isValid() error { + for _, setting := range s.Settings { + err := setting.isValid() + if 
err != nil { + return err + } + } + + for _, section := range s.Sections { + if err := section.IsValid(); err != nil { + return err + } + } + + return nil +} + +func (s *PluginSettingsSection) IsValid() error { + if s.Key == "" { + return errors.New("invalid empty Key") + } + + for _, setting := range s.Settings { + err := setting.isValid() + if err != nil { + return err + } + } + + return nil +} + +func (s *PluginSetting) isValid() error { + pluginSettingType, err := convertTypeToPluginSettingType(s.Type) + if err != nil { + return err + } + + if s.RegenerateHelpText != "" && pluginSettingType != Generated { + return errors.New("should not set RegenerateHelpText for setting type that is not generated") + } + + if s.Placeholder != "" && !(pluginSettingType == Generated || + pluginSettingType == Text || + pluginSettingType == LongText || + pluginSettingType == Number || + pluginSettingType == Username || + pluginSettingType == Custom) { + return errors.New("should not set Placeholder for setting type not in text, generated, number, username, or custom") + } + + if s.Options != nil { + if pluginSettingType != Radio && pluginSettingType != Dropdown { + return errors.New("should not set Options for setting type not in radio or dropdown") + } + + for _, option := range s.Options { + if option.DisplayName == "" || option.Value == "" { + return errors.New("should not have empty Displayname or Value for any option") + } + } + } + + return nil +} + +func convertTypeToPluginSettingType(t string) (PluginSettingType, error) { + var settingType PluginSettingType + switch t { + case "bool": + return Bool, nil + case "dropdown": + return Dropdown, nil + case "generated": + return Generated, nil + case "radio": + return Radio, nil + case "text": + return Text, nil + case "number": + return Number, nil + case "longtext": + return LongText, nil + case "username": + return Username, nil + case "custom": + return Custom, nil + default: + return settingType, errors.New("invalid setting 
type: " + t) + } +} + +// FindManifest will find and parse the manifest in a given directory. +// +// In all cases other than a does-not-exist error, path is set to the path of the manifest file that was +// found. +// +// Manifests are JSON or YAML files named plugin.json, plugin.yaml, or plugin.yml. +func FindManifest(dir string) (manifest *Manifest, path string, err error) { + for _, name := range []string{"plugin.yml", "plugin.yaml"} { + path = filepath.Join(dir, name) + f, ferr := os.Open(path) + if ferr != nil { + if !os.IsNotExist(ferr) { + return nil, "", ferr + } + continue + } + b, ioerr := io.ReadAll(f) + f.Close() + if ioerr != nil { + return nil, path, ioerr + } + var parsed Manifest + err = yaml.Unmarshal(b, &parsed) + if err != nil { + return nil, path, err + } + manifest = &parsed + manifest.Id = strings.ToLower(manifest.Id) + return manifest, path, nil + } + + path = filepath.Join(dir, "plugin.json") + f, ferr := os.Open(path) + if ferr != nil { + if os.IsNotExist(ferr) { + path = "" + } + return nil, path, ferr + } + defer f.Close() + var parsed Manifest + err = json.NewDecoder(f).Decode(&parsed) + if err != nil { + return nil, path, err + } + manifest = &parsed + manifest.Id = strings.ToLower(manifest.Id) + return manifest, path, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/map.go b/vendor/github.com/mattermost/mattermost/server/public/model/map.go new file mode 100644 index 00000000..d1bad276 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/map.go @@ -0,0 +1,12 @@ +package model + +import ( + "reflect" + "testing" + + "github.com/stretchr/testify/assert" +) + +func AssertNotSameMap[K comparable, V any](t *testing.T, a, b map[K]V) { + assert.False(t, reflect.ValueOf(a).UnsafePointer() == reflect.ValueOf(b).UnsafePointer()) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/marketplace_plugin.go 
b/vendor/github.com/mattermost/mattermost/server/public/model/marketplace_plugin.go new file mode 100644 index 00000000..1d19515d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/marketplace_plugin.go @@ -0,0 +1,129 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "io" + "net/url" + "strconv" + + "github.com/pkg/errors" +) + +// BaseMarketplacePlugin is a Mattermost plugin received from the Marketplace server. +type BaseMarketplacePlugin struct { + HomepageURL string `json:"homepage_url"` + IconData string `json:"icon_data"` + DownloadURL string `json:"download_url"` + ReleaseNotesURL string `json:"release_notes_url"` + Labels []MarketplaceLabel `json:"labels,omitempty"` + Hosting string `json:"hosting"` // Indicated if the plugin is limited to a certain hosting type + AuthorType string `json:"author_type"` // The maintainer of the plugin + ReleaseStage string `json:"release_stage"` // The stage in the software release cycle that the plugin is in + Enterprise bool `json:"enterprise"` // Indicated if the plugin is an enterprise plugin + Signature string `json:"signature"` // Signature represents a signature of a plugin saved in base64 encoding. + Manifest *Manifest `json:"manifest"` +} + +// MarketplaceLabel represents a label shown in the Marketplace UI. +type MarketplaceLabel struct { + Name string `json:"name"` + Description string `json:"description"` + URL string `json:"url"` + Color string `json:"color"` +} + +// MarketplacePlugin is a state aware Marketplace plugin. +type MarketplacePlugin struct { + *BaseMarketplacePlugin + InstalledVersion string `json:"installed_version"` +} + +// BaseMarketplacePluginsFromReader decodes a json-encoded list of plugins from the given io.Reader. 
+func BaseMarketplacePluginsFromReader(reader io.Reader) ([]*BaseMarketplacePlugin, error) { + plugins := []*BaseMarketplacePlugin{} + decoder := json.NewDecoder(reader) + + if err := decoder.Decode(&plugins); err != nil && err != io.EOF { + return nil, err + } + + return plugins, nil +} + +// MarketplacePluginsFromReader decodes a json-encoded list of plugins from the given io.Reader. +func MarketplacePluginsFromReader(reader io.Reader) ([]*MarketplacePlugin, error) { + plugins := []*MarketplacePlugin{} + decoder := json.NewDecoder(reader) + + if err := decoder.Decode(&plugins); err != nil && err != io.EOF { + return nil, err + } + + return plugins, nil +} + +// DecodeSignature Decodes signature and returns ReadSeeker. +func (plugin *BaseMarketplacePlugin) DecodeSignature() (io.ReadSeeker, error) { + signatureBytes, err := base64.StdEncoding.DecodeString(plugin.Signature) + if err != nil { + return nil, errors.Wrap(err, "Unable to decode base64 signature.") + } + return bytes.NewReader(signatureBytes), nil +} + +// MarketplacePluginFilter describes the parameters to request a list of plugins. +type MarketplacePluginFilter struct { + Page int + PerPage int + Filter string + ServerVersion string + BuildEnterpriseReady bool + EnterprisePlugins bool + Cloud bool + LocalOnly bool + Platform string + PluginId string + ReturnAllVersions bool + RemoteOnly bool +} + +// ApplyToURL modifies the given url to include query string parameters for the request. 
+func (filter *MarketplacePluginFilter) ApplyToURL(u *url.URL) { + q := u.Query() + q.Add("page", strconv.Itoa(filter.Page)) + if filter.PerPage > 0 { + q.Add("per_page", strconv.Itoa(filter.PerPage)) + } + q.Add("filter", filter.Filter) + q.Add("server_version", filter.ServerVersion) + q.Add("build_enterprise_ready", strconv.FormatBool(filter.BuildEnterpriseReady)) + q.Add("enterprise_plugins", strconv.FormatBool(filter.EnterprisePlugins)) + q.Add("cloud", strconv.FormatBool(filter.Cloud)) + q.Add("local_only", strconv.FormatBool(filter.LocalOnly)) + q.Add("remote_only", strconv.FormatBool(filter.RemoteOnly)) + q.Add("platform", filter.Platform) + q.Add("plugin_id", filter.PluginId) + q.Add("return_all_versions", strconv.FormatBool(filter.ReturnAllVersions)) + u.RawQuery = q.Encode() +} + +// InstallMarketplacePluginRequest struct describes parameters of the requested plugin. +type InstallMarketplacePluginRequest struct { + Id string `json:"id"` + Version string `json:"version"` +} + +// PluginRequestFromReader decodes a json-encoded plugin request from the given io.Reader. +func PluginRequestFromReader(reader io.Reader) (*InstallMarketplacePluginRequest, error) { + var r *InstallMarketplacePluginRequest + err := json.NewDecoder(reader).Decode(&r) + if err != nil { + return nil, err + } + return r, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/member_invite.go b/vendor/github.com/mattermost/mattermost/server/public/model/member_invite.go new file mode 100644 index 00000000..4e4e59d5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/member_invite.go @@ -0,0 +1,56 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "encoding/json" + "net/http" +) + +type MemberInvite struct { + Emails []string `json:"emails"` + ChannelIds []string `json:"channelIds,omitempty"` + Message string `json:"message"` +} + +func (i *MemberInvite) Auditable() map[string]any { + return map[string]any{ + "emails": i.Emails, + "channel_ids": i.ChannelIds, + } +} + +// IsValid validates that the invitation info is loaded correctly and with the correct structure +func (i *MemberInvite) IsValid() *AppError { + if len(i.Emails) == 0 { + return NewAppError("MemberInvite.IsValid", "model.member.is_valid.emails.app_error", nil, "", http.StatusBadRequest) + } + + if len(i.ChannelIds) > 0 { + for _, channel := range i.ChannelIds { + if len(channel) != 26 { + return NewAppError("MemberInvite.IsValid", "model.member.is_valid.channel.app_error", nil, "channel="+channel, http.StatusBadRequest) + } + } + } + + return nil +} + +func (i *MemberInvite) UnmarshalJSON(b []byte) error { + var emails []string + if err := json.Unmarshal(b, &emails); err == nil { + *i = MemberInvite{} + i.Emails = emails + return nil + } + + type TempMemberInvite MemberInvite + var o2 TempMemberInvite + if err := json.Unmarshal(b, &o2); err != nil { + return err + } + *i = MemberInvite(o2) + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/mention_map.go b/vendor/github.com/mattermost/mattermost/server/public/model/mention_map.go new file mode 100644 index 00000000..2f3444dd --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/mention_map.go @@ -0,0 +1,80 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/url" +) + +type UserMentionMap map[string]string +type ChannelMentionMap map[string]string + +const ( + userMentionsKey = "user_mentions" + userMentionsIdsKey = "user_mentions_ids" + channelMentionsKey = "channel_mentions" + channelMentionsIdsKey = "channel_mentions_ids" +) + +func UserMentionMapFromURLValues(values url.Values) (UserMentionMap, error) { + return mentionsFromURLValues(values, userMentionsKey, userMentionsIdsKey) +} + +func (m UserMentionMap) ToURLValues() url.Values { + return mentionsToURLValues(m, userMentionsKey, userMentionsIdsKey) +} + +func ChannelMentionMapFromURLValues(values url.Values) (ChannelMentionMap, error) { + return mentionsFromURLValues(values, channelMentionsKey, channelMentionsIdsKey) +} + +func (m ChannelMentionMap) ToURLValues() url.Values { + return mentionsToURLValues(m, channelMentionsKey, channelMentionsIdsKey) +} + +func mentionsFromURLValues(values url.Values, mentionKey, idKey string) (map[string]string, error) { + mentions, mentionsOk := values[mentionKey] + ids, idsOk := values[idKey] + + if !mentionsOk && !idsOk { + return map[string]string{}, nil + } + + if !mentionsOk { + return nil, fmt.Errorf("%s key not found", mentionKey) + } + + if !idsOk { + return nil, fmt.Errorf("%s key not found", idKey) + } + + if len(mentions) != len(ids) { + return nil, fmt.Errorf("keys %s and %s have different length", mentionKey, idKey) + } + + mentionsMap := make(map[string]string) + for i, mention := range mentions { + id := ids[i] + + if oldId, ok := mentionsMap[mention]; ok && oldId != id { + return nil, fmt.Errorf("key %s has two different values: %s and %s", mention, oldId, id) + } + + mentionsMap[mention] = id + } + + return mentionsMap, nil +} + +func mentionsToURLValues(mentions map[string]string, mentionKey, idKey string) url.Values { + values := url.Values{} + + for mention, id := range mentions { + values.Add(mentionKey, mention) + values.Add(idKey, id) + } + + return values +} 
diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/message_export.go b/vendor/github.com/mattermost/mattermost/server/public/model/message_export.go new file mode 100644 index 00000000..192ddcfe --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/message_export.go @@ -0,0 +1,54 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import "encoding/json" + +type MessageExport struct { + TeamId *string + TeamName *string + TeamDisplayName *string + + ChannelId *string + ChannelName *string + ChannelDisplayName *string + ChannelType *ChannelType + + UserId *string + UserEmail *string + Username *string + IsBot bool + + PostId *string + PostCreateAt *int64 + PostUpdateAt *int64 + PostDeleteAt *int64 + PostEditAt *int64 + PostMessage *string + PostType *string + PostRootId *string + PostProps *string + PostOriginalId *string + PostFileIds StringArray +} + +// MessageExportCursor retrieves posts in the inclusive range: +// [LastPostUpdateAt + LastPostId, UntilUpdateAt] +type MessageExportCursor struct { + LastPostUpdateAt int64 + LastPostId string + UntilUpdateAt int64 +} + +// PreviewID returns the value of the post's previewed_post prop, if present, or an empty string. +func (m *MessageExport) PreviewID() string { + var previewID string + props := map[string]any{} + if m.PostProps != nil && json.Unmarshal([]byte(*m.PostProps), &props) == nil { + if val, ok := props[PostPropsPreviewedPost]; ok { + previewID = val.(string) + } + } + return previewID +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/metrics.go b/vendor/github.com/mattermost/mattermost/server/public/model/metrics.go new file mode 100644 index 00000000..8dd0dc60 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/metrics.go @@ -0,0 +1,157 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. 
+// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "strings" + + "github.com/blang/semver/v4" +) + +type MetricType string + +const ( + ClientTimeToFirstByte MetricType = "TTFB" + ClientTimeToLastByte MetricType = "TTLB" + ClientTimeToDOMInteractive MetricType = "dom_interactive" + ClientSplashScreenEnd MetricType = "splash_screen" + ClientFirstContentfulPaint MetricType = "FCP" + ClientLargestContentfulPaint MetricType = "LCP" + ClientInteractionToNextPaint MetricType = "INP" + ClientCumulativeLayoutShift MetricType = "CLS" + ClientLongTasks MetricType = "long_tasks" + ClientPageLoadDuration MetricType = "page_load" + ClientChannelSwitchDuration MetricType = "channel_switch" + ClientTeamSwitchDuration MetricType = "team_switch" + ClientRHSLoadDuration MetricType = "rhs_load" + ClientGlobalThreadsLoadDuration MetricType = "global_threads_load" + + MobileClientLoadDuration MetricType = "mobile_load" + MobileClientChannelSwitchDuration MetricType = "mobile_channel_switch" + MobileClientTeamSwitchDuration MetricType = "mobile_team_switch" + MobileClientNetworkRequestsAverageSpeed MetricType = "mobile_network_requests_average_speed" + MobileClientNetworkRequestsEffectiveLatency MetricType = "mobile_network_requests_effective_latency" + MobileClientNetworkRequestsElapsedTime MetricType = "mobile_network_requests_elapsed_time" + MobileClientNetworkRequestsLatency MetricType = "mobile_network_requests_latency" + MobileClientNetworkRequestsTotalCompressedSize MetricType = "mobile_network_requests_total_compressed_size" + MobileClientNetworkRequestsTotalParallelRequests MetricType = "mobile_network_requests_total_parallel_requests" + MobileClientNetworkRequestsTotalRequests MetricType = "mobile_network_requests_total_requests" + MobileClientNetworkRequestsTotalSequentialRequests MetricType = "mobile_network_requests_total_sequential_requests" + MobileClientNetworkRequestsTotalSize MetricType = "mobile_network_requests_total_size" + + 
DesktopClientCPUUsage MetricType = "desktop_cpu" + DesktopClientMemoryUsage MetricType = "desktop_memory" + + performanceReportTTLMilliseconds = 300 * 1000 // 300 seconds/5 minutes +) + +var ( + performanceReportVersion = semver.MustParse("0.1.0") + acceptedPlatforms = SliceToMapKey("linux", "macos", "ios", "android", "windows", "other") + acceptedAgents = SliceToMapKey("desktop", "firefox", "chrome", "safari", "edge", "other") + + AcceptedInteractions = SliceToMapKey("keyboard", "pointer", "other") + AcceptedLCPRegions = SliceToMapKey( + "post", + "post_textbox", + "channel_sidebar", + "team_sidebar", + "channel_header", + "global_header", + "announcement_bar", + "center_channel", + "modal_content", + "other", + ) + AcceptedTrueFalseLabels = SliceToMapKey("true", "false") + AcceptedSplashScreenOrigins = SliceToMapKey("root", "team_controller") + AcceptedNetworkRequestGroups = SliceToMapKey( + "Cold Start", + "Cold Start Deferred", + "DeepLink", + "DeepLink Deferred", + "Login", + "Login Deferred", + "Notification", + "Notification Deferred", + "Server Switch", + "Server Switch Deferred", + "WebSocket Reconnect", + "WebSocket Reconnect Deferred", + ) +) + +type MetricSample struct { + Metric MetricType `json:"metric"` + Value float64 `json:"value"` + Labels map[string]string `json:"labels,omitempty"` +} + +func (s *MetricSample) GetLabelValue(name string, acceptedValues map[string]any, defaultValue string) string { + return processLabel(s.Labels, name, acceptedValues, defaultValue) +} + +// PerformanceReport is a set of samples collected from a client +type PerformanceReport struct { + Version string `json:"version"` + ClientID string `json:"client_id"` + Labels map[string]string `json:"labels"` + Start float64 `json:"start"` + End float64 `json:"end"` + Counters []*MetricSample `json:"counters"` + Histograms []*MetricSample `json:"histograms"` +} + +func (r *PerformanceReport) IsValid() error { + if r == nil { + return fmt.Errorf("the report is nil") + } + + 
reportVersion, err := semver.ParseTolerant(r.Version) + if err != nil { + return fmt.Errorf("could not parse semver version: %s, %w", r.Version, err) + } + + if reportVersion.Major != performanceReportVersion.Major || reportVersion.Minor > performanceReportVersion.Minor { + return fmt.Errorf("report version is not supported: server version: %s, report version: %s", performanceReportVersion.String(), r.Version) + } + + if r.Start > r.End { + return fmt.Errorf("report timestamps are erroneous: start_timestamp %f is greater than end_timestamp %f", r.Start, r.End) + } + + now := GetMillis() + if r.End < float64(now-performanceReportTTLMilliseconds) { + return fmt.Errorf("report is outdated: end_time %f is past %d ms from now", r.End, performanceReportTTLMilliseconds) + } + + return nil +} + +func (r *PerformanceReport) ProcessLabels() map[string]string { + return map[string]string{ + "platform": processLabel(r.Labels, "platform", acceptedPlatforms, "other"), + "agent": processLabel(r.Labels, "agent", acceptedAgents, "other"), + "desktop_app_version": r.Labels["desktop_app_version"], + "network_request_group": processLabel(r.Labels, "network_request_group", AcceptedNetworkRequestGroups, "Login"), + } +} + +func processLabel(labels map[string]string, name string, acceptedValues map[string]any, defaultValue string) string { + // check if the label is specified + value, ok := labels[name] + if !ok { + return defaultValue + } + value = strings.ToLower(value) + + // check if the value is one that we accept + _, ok = acceptedValues[value] + if !ok { + return defaultValue + } + + return value +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/mfa_secret.go b/vendor/github.com/mattermost/mattermost/server/public/model/mfa_secret.go new file mode 100644 index 00000000..8cfa675e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/mfa_secret.go @@ -0,0 +1,9 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. 
+// See LICENSE.txt for license information. + +package model + +type MfaSecret struct { + Secret string `json:"secret"` + QRCode string `json:"qr_code"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/migration.go b/vendor/github.com/mattermost/mattermost/server/public/model/migration.go new file mode 100644 index 00000000..a77785d6 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/migration.go @@ -0,0 +1,60 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +const ( + AdvancedPermissionsMigrationKey = "AdvancedPermissionsMigrationComplete" + MigrationKeyAdvancedPermissionsPhase2 = "migration_advanced_permissions_phase_2" + + MigrationKeyEmojiPermissionsSplit = "emoji_permissions_split" + MigrationKeyWebhookPermissionsSplit = "webhook_permissions_split" + MigrationKeyIntegrationsOwnPermissions = "integrations_own_permissions" + MigrationKeyListJoinPublicPrivateTeams = "list_join_public_private_teams" + MigrationKeyRemovePermanentDeleteUser = "remove_permanent_delete_user" + MigrationKeyAddBotPermissions = "add_bot_permissions" + MigrationKeyApplyChannelManageDeleteToChannelUser = "apply_channel_manage_delete_to_channel_user" + MigrationKeyRemoveChannelManageDeleteFromTeamUser = "remove_channel_manage_delete_from_team_user" + MigrationKeyViewMembersNewPermission = "view_members_new_permission" + MigrationKeyAddManageGuestsPermissions = "add_manage_guests_permissions" + MigrationKeyChannelModerationsPermissions = "channel_moderations_permissions" + MigrationKeyAddUseGroupMentionsPermission = "add_use_group_mentions_permission" + MigrationKeyAddSystemConsolePermissions = "add_system_console_permissions" + MigrationKeySidebarCategoriesPhase2 = "migration_sidebar_categories_phase_2" + MigrationKeyAddConvertChannelPermissions = "add_convert_channel_permissions" + MigrationKeyAddSystemRolesPermissions = 
"add_system_roles_permissions" + MigrationKeyAddBillingPermissions = "add_billing_permissions" + MigrationKeyAddManageSharedChannelPermissions = "manage_shared_channel_permissions" + MigrationKeyAddManageSecureConnectionsPermissions = "manage_secure_connections_permissions" + MigrationKeyAddDownloadComplianceExportResults = "download_compliance_export_results" + MigrationKeyAddComplianceSubsectionPermissions = "compliance_subsection_permissions" + MigrationKeyAddExperimentalSubsectionPermissions = "experimental_subsection_permissions" + MigrationKeyAddAuthenticationSubsectionPermissions = "authentication_subsection_permissions" + MigrationKeyAddSiteSubsectionPermissions = "site_subsection_permissions" + MigrationKeyAddEnvironmentSubsectionPermissions = "environment_subsection_permissions" + MigrationKeyAddReportingSubsectionPermissions = "reporting_subsection_permissions" + MigrationKeyAddTestEmailAncillaryPermission = "test_email_ancillary_permission" + MigrationKeyAddAboutSubsectionPermissions = "about_subsection_permissions" + MigrationKeyAddIntegrationsSubsectionPermissions = "integrations_subsection_permissions" + MigrationKeyAddPlaybooksPermissions = "playbooks_permissions" + MigrationKeyAddCustomUserGroupsPermissions = "custom_groups_permissions" + MigrationKeyAddPlayboosksManageRolesPermissions = "playbooks_manage_roles" + MigrationKeyAddProductsBoardsPermissions = "products_boards" + MigrationKeyAddCustomUserGroupsPermissionRestore = "custom_groups_permission_restore" + MigrationKeyAddReadChannelContentPermissions = "read_channel_content_permissions" + MigrationKeyS3Path = "s3_path_migration" + MigrationKeyDeleteEmptyDrafts = "delete_empty_drafts_migration" + MigrationKeyDeleteOrphanDrafts = "delete_orphan_drafts_migration" + MigrationKeyAddIPFilteringPermissions = "add_ip_filtering_permissions" + MigrationKeyAddOutgoingOAuthConnectionsPermissions = "add_outgoing_oauth_connections_permissions" + MigrationKeyAddChannelBookmarksPermissions = 
"add_channel_bookmarks_permissions" + MigrationKeyDeleteDmsPreferences = "delete_dms_preferences_migration" + MigrationKeyAddManageJobAncillaryPermissions = "add_manage_jobs_ancillary_permissions" + MigrationKeyAddUploadFilePermission = "add_upload_file_permission" + RestrictAccessToChannelConversionToPublic = "restrict_access_to_channel_conversion_to_public_permissions" + MigrationKeyFixReadAuditsPermission = "fix_read_audits_permission" + MigrationRemoveGetAnalyticsPermission = "remove_get_analytics_permission" + MigrationAddSysconsoleMobileSecurityPermission = "add_sysconsole_mobile_security_permission" + MigrationKeyAddChannelBannerPermissions = "add_channel_banner_permissions" + MigrationKeyAddChannelAccessRulesPermission = "add_channel_access_rules_permission" +) diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/notification.go b/vendor/github.com/mattermost/mattermost/server/public/model/notification.go new file mode 100644 index 00000000..da23f132 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/notification.go @@ -0,0 +1,43 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type NotificationStatus string +type NotificationType string +type NotificationReason string + +const ( + NotificationStatusSuccess NotificationStatus = "success" + NotificationStatusError NotificationStatus = "error" + NotificationStatusNotSent NotificationStatus = "not_sent" + NotificationStatusUnsupported NotificationStatus = "unsupported" + + NotificationTypeAll NotificationType = "all" + NotificationTypeEmail NotificationType = "email" + NotificationTypeWebsocket NotificationType = "websocket" + NotificationTypePush NotificationType = "push" + + NotificationNoPlatform = "no_platform" + + NotificationReasonFetchError NotificationReason = "fetch_error" + NotificationReasonParseError NotificationReason = "json_parse_error" + NotificationReasonMarshalError NotificationReason = "json_marshal_error" + NotificationReasonPushProxyError NotificationReason = "push_proxy_error" + NotificationReasonPushProxySendError NotificationReason = "push_proxy_send_error" + NotificationReasonPushProxyRemoveDevice NotificationReason = "push_proxy_remove_device" + NotificationReasonRejectedByPlugin NotificationReason = "rejected_by_plugin" + NotificationReasonSessionExpired NotificationReason = "session_expired" + NotificationReasonChannelMuted NotificationReason = "channel_muted" + NotificationReasonSystemMessage NotificationReason = "system_message" + NotificationReasonLevelSetToNone NotificationReason = "notify_level_none" + NotificationReasonNotMentioned NotificationReason = "not_mentioned" + NotificationReasonUserStatus NotificationReason = "user_status" + NotificationReasonUserIsActive NotificationReason = "user_is_active" + NotificationReasonMissingProfile NotificationReason = "missing_profile" + NotificationReasonEmailNotVerified NotificationReason = "email_not_verified" + NotificationReasonEmailSendError NotificationReason = "email_send_error" + NotificationReasonTooManyUsersInChannel NotificationReason = "too_many_users_in_channel" + 
NotificationReasonResolvePersistentNotificationError NotificationReason = "resolve_persistent_notification_error" + NotificationReasonMissingThreadMembership NotificationReason = "missing_thread_membership" +) diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/notify_admin.go b/vendor/github.com/mattermost/mattermost/server/public/model/notify_admin.go new file mode 100644 index 00000000..6d496e5f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/notify_admin.go @@ -0,0 +1,82 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "database/sql" + "fmt" + "net/http" + "strings" +) + +type MattermostFeature string + +const ( + PaidFeatureGuestAccounts = MattermostFeature("mattermost.feature.guest_accounts") + PaidFeatureCustomUsergroups = MattermostFeature("mattermost.feature.custom_user_groups") + PaidFeatureCreateMultipleTeams = MattermostFeature("mattermost.feature.create_multiple_teams") + PaidFeatureStartcall = MattermostFeature("mattermost.feature.start_call") + PaidFeaturePlaybooksRetrospective = MattermostFeature("mattermost.feature.playbooks_retro") + PaidFeatureUnlimitedMessages = MattermostFeature("mattermost.feature.unlimited_messages") + PaidFeatureUnlimitedFileStorage = MattermostFeature("mattermost.feature.unlimited_file_storage") + PaidFeatureAllProfessionalfeatures = MattermostFeature("mattermost.feature.all_professional") + PaidFeatureAllEnterprisefeatures = MattermostFeature("mattermost.feature.all_enterprise") + UpgradeDowngradedWorkspace = MattermostFeature("mattermost.feature.upgrade_downgraded_workspace") + PluginFeature = MattermostFeature("mattermost.feature.plugin") + PaidFeatureHighlightWithoutNotification = MattermostFeature("mattermost.feature.highlight_without_notification") +) + +var validSKUs = map[string]struct{}{ + LicenseShortSkuProfessional: {}, + LicenseShortSkuEnterprise: {}, +} + +// 
These are the features a non admin would typically ping an admin about +var paidFeatures = map[MattermostFeature]struct{}{ + PaidFeatureGuestAccounts: {}, + PaidFeatureCustomUsergroups: {}, + PaidFeatureCreateMultipleTeams: {}, + PaidFeatureStartcall: {}, + PaidFeaturePlaybooksRetrospective: {}, + PaidFeatureUnlimitedMessages: {}, + PaidFeatureUnlimitedFileStorage: {}, + PaidFeatureAllProfessionalfeatures: {}, + PaidFeatureAllEnterprisefeatures: {}, + UpgradeDowngradedWorkspace: {}, + PaidFeatureHighlightWithoutNotification: {}, +} + +type NotifyAdminToUpgradeRequest struct { + TrialNotification bool `json:"trial_notification"` + RequiredPlan string `json:"required_plan"` + RequiredFeature MattermostFeature `json:"required_feature"` +} + +type NotifyAdminData struct { + CreateAt int64 `json:"create_at,omitempty"` + UserId string `json:"user_id"` + RequiredPlan string `json:"required_plan"` + RequiredFeature MattermostFeature `json:"required_feature"` + Trial bool `json:"trial"` + SentAt sql.NullInt64 `json:"sent_at"` +} + +func (nad *NotifyAdminData) IsValid() *AppError { + if strings.HasPrefix(string(nad.RequiredFeature), string(PluginFeature)) { + return nil + } + if _, planOk := validSKUs[nad.RequiredPlan]; !planOk { + return NewAppError("NotifyAdmin.IsValid", fmt.Sprintf("Invalid plan, %s provided", nad.RequiredPlan), nil, "", http.StatusBadRequest) + } + + if _, featureOk := paidFeatures[nad.RequiredFeature]; !featureOk { + return NewAppError("NotifyAdmin.IsValid", fmt.Sprintf("Invalid feature, %s provided", nad.RequiredFeature), nil, "", http.StatusBadRequest) + } + + return nil +} + +func (nad *NotifyAdminData) PreSave() { + nad.CreateAt = GetMillis() +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/oauth.go b/vendor/github.com/mattermost/mattermost/server/public/model/oauth.go new file mode 100644 index 00000000..e5000ac8 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/oauth.go @@ -0,0 +1,267 @@ +// 
Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "crypto/subtle" + "fmt" + "net/http" + "slices" + "unicode/utf8" +) + +const ( + OAuthActionSignup = "signup" + OAuthActionLogin = "login" + OAuthActionEmailToSSO = "email_to_sso" + OAuthActionSSOToEmail = "sso_to_email" + OAuthActionMobile = "mobile" +) + +type OAuthApp struct { + Id string `json:"id"` + CreatorId string `json:"creator_id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + ClientSecret string `json:"client_secret"` + Name string `json:"name"` + Description string `json:"description"` + IconURL string `json:"icon_url"` + CallbackUrls StringArray `json:"callback_urls"` + Homepage string `json:"homepage"` + IsTrusted bool `json:"is_trusted"` + MattermostAppID string `json:"mattermost_app_id"` + + IsDynamicallyRegistered bool `json:"is_dynamically_registered,omitempty"` +} + +// OAuthAppRequest represents the request body for creating an OAuth app +type OAuthAppRequest struct { + Name string `json:"name"` + Description string `json:"description"` + IconURL string `json:"icon_url"` + CallbackUrls StringArray `json:"callback_urls"` + Homepage string `json:"homepage"` + IsTrusted bool `json:"is_trusted"` + IsPublic bool `json:"is_public"` +} + +func (a *OAuthApp) Auditable() map[string]any { + return map[string]any{ + "id": a.Id, + "creator_id": a.CreatorId, + "create_at": a.CreateAt, + "update_at": a.UpdateAt, + "name": a.Name, + "description": a.Description, + "icon_url": a.IconURL, + "callback_urls:": a.CallbackUrls, + "homepage": a.Homepage, + "is_trusted": a.IsTrusted, + "mattermost_app_id": a.MattermostAppID, + "token_endpoint_auth_method": a.GetTokenEndpointAuthMethod(), + "is_dynamically_registered": a.IsDynamicallyRegistered, + } +} + +func (a *OAuthApp) IsValid() *AppError { + if !IsValidId(a.Id) { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.app_id.app_error", 
nil, "", http.StatusBadRequest) + } + + if a.CreateAt == 0 { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.create_at.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + if a.UpdateAt == 0 { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.update_at.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + if !IsValidId(a.CreatorId) && !a.IsDynamicallyRegistered { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.creator_id.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + // Validate client secret length if present + if a.ClientSecret != "" && len(a.ClientSecret) > 128 { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.client_secret.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + if a.Name == "" || len(a.Name) > 64 { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.name.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + if len(a.CallbackUrls) == 0 || len(fmt.Sprintf("%s", a.CallbackUrls)) > 1024 { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.callback.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + for _, callback := range a.CallbackUrls { + if !IsValidHTTPURL(callback) { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.callback.app_error", nil, "", http.StatusBadRequest) + } + } + + if a.Homepage == "" && !a.IsDynamicallyRegistered { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.homepage.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + if a.Homepage != "" && (len(a.Homepage) > 256 || !IsValidHTTPURL(a.Homepage)) { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.homepage.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + if utf8.RuneCountInString(a.Description) > 512 { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.description.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + if a.IconURL != "" { + 
if len(a.IconURL) > 512 || !IsValidHTTPURL(a.IconURL) { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.icon_url.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + } + + if len(a.MattermostAppID) > 32 { + return NewAppError("OAuthApp.IsValid", "model.oauth.is_valid.mattermost_app_id.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + return nil +} + +// PreSave will set the Id and ClientSecret if missing. It will also fill +// in the CreateAt, UpdateAt times. It should be run before saving the app to the db. +func (a *OAuthApp) PreSave() { + if a.Id == "" { + a.Id = NewId() + } + + // PreSave no longer generates client secrets - callers must explicitly set ClientSecret + // if they want to create a confidential client + + a.CreateAt = GetMillis() + a.UpdateAt = a.CreateAt +} + +// PreUpdate should be run before updating the app in the db. +func (a *OAuthApp) PreUpdate() { + a.UpdateAt = GetMillis() +} + +// Generate a valid strong etag so the browser can cache the results +func (a *OAuthApp) Etag() string { + return Etag(a.Id, a.UpdateAt) +} + +// Remove any private data from the app object +func (a *OAuthApp) Sanitize() { + a.ClientSecret = "" +} + +func (a *OAuthApp) IsValidRedirectURL(url string) bool { + return slices.Contains(a.CallbackUrls, url) +} + +// GetTokenEndpointAuthMethod returns the OAuth token endpoint authentication method +// based on whether the client has a secret +func (a *OAuthApp) GetTokenEndpointAuthMethod() string { + if a.ClientSecret == "" { + return ClientAuthMethodNone + } + return ClientAuthMethodClientSecretPost +} + +// IsPublicClient returns true if this is a public client (uses "none" auth method) +func (a *OAuthApp) IsPublicClient() bool { + return a.GetTokenEndpointAuthMethod() == ClientAuthMethodNone +} + +// ValidateForGrantType validates the OAuth app for a specific grant type and provided credentials +func (a *OAuthApp) ValidateForGrantType(grantType, clientSecret, codeVerifier string) 
*AppError { + if a.IsPublicClient() { + return a.validatePublicClientGrant(grantType, clientSecret, codeVerifier) + } + return a.validateConfidentialClientGrant(grantType, clientSecret) +} + +// validatePublicClientGrant validates that public client requests follow OAuth 2.1 security requirements +func (a *OAuthApp) validatePublicClientGrant(grantType, clientSecret, codeVerifier string) *AppError { + // Public clients must not provide a client secret + if clientSecret != "" { + return NewAppError("OAuthApp.validatePublicClientGrant", "model.oauth.validate_grant.public_client_secret.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + // Public clients cannot use refresh token grant type + if grantType == RefreshTokenGrantType { + return NewAppError("OAuthApp.validatePublicClientGrant", "model.oauth.validate_grant.public_client_refresh_token.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + // Public clients must use PKCE for authorization code grant + if grantType == AccessTokenGrantType && codeVerifier == "" { + return NewAppError("OAuthApp.validatePublicClientGrant", "model.oauth.validate_grant.pkce_required.app_error", nil, "app_id="+a.Id, http.StatusBadRequest) + } + + return nil +} + +// validateConfidentialClientGrant validates confidential client authentication +func (a *OAuthApp) validateConfidentialClientGrant(grantType, clientSecret string) *AppError { + // Confidential clients must provide correct client secret + if subtle.ConstantTimeCompare([]byte(a.ClientSecret), []byte(clientSecret)) == 0 { + return NewAppError("OAuthApp.validateConfidentialClientGrant", "model.oauth.validate_grant.credentials.app_error", nil, "app_id="+a.Id, http.StatusUnauthorized) + } + + return nil +} + +func NewOAuthAppFromClientRegistration(req *ClientRegistrationRequest, creatorId string) *OAuthApp { + app := &OAuthApp{ + CreatorId: creatorId, + CallbackUrls: req.RedirectURIs, + IsDynamicallyRegistered: true, + } + + if req.ClientName != nil { + 
app.Name = *req.ClientName + } else { + app.Name = "Dynamically Registered Client" + } + + // Generate client secret based on requested auth method, default to confidential client + requestedAuthMethod := ClientAuthMethodClientSecretPost + if req.TokenEndpointAuthMethod != nil { + requestedAuthMethod = *req.TokenEndpointAuthMethod + } + + if requestedAuthMethod != ClientAuthMethodNone { + app.ClientSecret = NewId() + } + + if req.ClientURI != nil { + app.Homepage = *req.ClientURI + } + + return app +} + +func (a *OAuthApp) ToClientRegistrationResponse(siteURL string) *ClientRegistrationResponse { + resp := &ClientRegistrationResponse{ + ClientID: a.Id, + RedirectURIs: a.CallbackUrls, + TokenEndpointAuthMethod: a.GetTokenEndpointAuthMethod(), + GrantTypes: GetDefaultGrantTypes(), + ResponseTypes: GetDefaultResponseTypes(), + Scope: ScopeUser, + } + + if !a.IsPublicClient() { + resp.ClientSecret = &a.ClientSecret + } + + if a.Name != "" { + resp.ClientName = &a.Name + } + + if a.Homepage != "" { + resp.ClientURI = &a.Homepage + } + + return resp +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/oauth_dcr.go b/vendor/github.com/mattermost/mattermost/server/public/model/oauth_dcr.go new file mode 100644 index 00000000..265797ce --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/oauth_dcr.go @@ -0,0 +1,84 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" +) + +type ClientRegistrationRequest struct { + RedirectURIs []string `json:"redirect_uris"` + TokenEndpointAuthMethod *string `json:"token_endpoint_auth_method,omitempty"` + ClientName *string `json:"client_name,omitempty"` + ClientURI *string `json:"client_uri,omitempty"` +} + +type ClientRegistrationResponse struct { + ClientID string `json:"client_id"` + ClientSecret *string `json:"client_secret,omitempty"` + RedirectURIs []string `json:"redirect_uris"` + TokenEndpointAuthMethod string `json:"token_endpoint_auth_method"` + GrantTypes []string `json:"grant_types"` + ResponseTypes []string `json:"response_types"` + Scope string `json:"scope,omitempty"` + ClientName *string `json:"client_name,omitempty"` + ClientURI *string `json:"client_uri,omitempty"` +} + +const ( + DCRErrorInvalidRedirectURI = "invalid_redirect_uri" + DCRErrorInvalidClientMetadata = "invalid_client_metadata" + DCRErrorUnsupportedOperation = "unsupported_operation" +) + +type DCRError struct { + Error string `json:"error"` + ErrorDescription string `json:"error_description,omitempty"` +} + +func (r *ClientRegistrationRequest) IsValid() *AppError { + if len(r.RedirectURIs) == 0 { + return NewAppError("ClientRegistrationRequest.IsValid", "model.dcr.is_valid.redirect_uris.app_error", nil, "", http.StatusBadRequest) + } + + for _, uri := range r.RedirectURIs { + if !IsValidHTTPURL(uri) { + return NewAppError("ClientRegistrationRequest.IsValid", "model.dcr.is_valid.redirect_uri_format.app_error", nil, "uri="+uri, http.StatusBadRequest) + } + } + + if r.ClientName != nil && len(*r.ClientName) > 64 { + return NewAppError("ClientRegistrationRequest.IsValid", "model.dcr.is_valid.client_name.app_error", nil, "", http.StatusBadRequest) + } + + if r.ClientURI != nil { + if !IsValidHTTPURL(*r.ClientURI) { + return NewAppError("ClientRegistrationRequest.IsValid", "model.dcr.is_valid.client_uri_format.app_error", nil, "uri="+*r.ClientURI, http.StatusBadRequest) + } + 
if len(*r.ClientURI) > 256 { + return NewAppError("ClientRegistrationRequest.IsValid", "model.dcr.is_valid.client_uri_length.app_error", nil, "", http.StatusBadRequest) + } + } + + if r.TokenEndpointAuthMethod != nil && *r.TokenEndpointAuthMethod != ClientAuthMethodClientSecretPost && *r.TokenEndpointAuthMethod != ClientAuthMethodNone { + return NewAppError("ClientRegistrationRequest.IsValid", "model.dcr.is_valid.unsupported_auth_method.app_error", nil, "method="+*r.TokenEndpointAuthMethod, http.StatusBadRequest) + } + + return nil +} + +func NewDCRError(errorType, description string) *DCRError { + return &DCRError{ + Error: errorType, + ErrorDescription: description, + } +} + +func GetDefaultGrantTypes() []string { + return []string{GrantTypeAuthorizationCode, GrantTypeRefreshToken} +} + +func GetDefaultResponseTypes() []string { + return []string{ResponseTypeCode} +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/oauth_metadata.go b/vendor/github.com/mattermost/mattermost/server/public/model/oauth_metadata.go new file mode 100644 index 00000000..7fc7e451 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/oauth_metadata.go @@ -0,0 +1,71 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import "net/url" + +type AuthorizationServerMetadata struct { + Issuer string `json:"issuer"` + AuthorizationEndpoint string `json:"authorization_endpoint,omitempty"` + TokenEndpoint string `json:"token_endpoint,omitempty"` + ResponseTypesSupported []string `json:"response_types_supported"` + RegistrationEndpoint string `json:"registration_endpoint,omitempty"` + ScopesSupported []string `json:"scopes_supported,omitempty"` + GrantTypesSupported []string `json:"grant_types_supported,omitempty"` + TokenEndpointAuthMethodsSupported []string `json:"token_endpoint_auth_methods_supported,omitempty"` + CodeChallengeMethodsSupported []string `json:"code_challenge_methods_supported,omitempty"` +} + +const ( + GrantTypeAuthorizationCode = "authorization_code" + GrantTypeRefreshToken = "refresh_token" + + ResponseTypeCode = "code" + + ClientAuthMethodNone = "none" + ClientAuthMethodClientSecretPost = "client_secret_post" + + ScopeUser = "user" +) + +const ( + OAuthAuthorizeEndpoint = "/oauth/authorize" + OAuthAccessTokenEndpoint = "/oauth/access_token" + OAuthDeauthorizeEndpoint = "/oauth/deauthorize" + OAuthAppsRegisterEndpoint = "/api/v4/oauth/apps/register" + OAuthMetadataEndpoint = "/.well-known/oauth-authorization-server" +) + +func GetDefaultMetadata(siteURL string) (*AuthorizationServerMetadata, error) { + authorizationEndpoint, err := url.JoinPath(siteURL, OAuthAuthorizeEndpoint) + if err != nil { + return nil, err + } + tokenEndpoint, err := url.JoinPath(siteURL, OAuthAccessTokenEndpoint) + if err != nil { + return nil, err + } + return &AuthorizationServerMetadata{ + Issuer: siteURL, + AuthorizationEndpoint: authorizationEndpoint, + TokenEndpoint: tokenEndpoint, + ResponseTypesSupported: []string{ + ResponseTypeCode, + }, + GrantTypesSupported: []string{ + GrantTypeAuthorizationCode, + GrantTypeRefreshToken, + }, + TokenEndpointAuthMethodsSupported: []string{ + ClientAuthMethodNone, // Public clients (PKCE) + ClientAuthMethodClientSecretPost, // 
Confidential clients + }, + ScopesSupported: []string{ + ScopeUser, + }, + CodeChallengeMethodsSupported: []string{ + PKCECodeChallengeMethodS256, // S256 method supported for optional PKCE + }, + }, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/onboarding.go b/vendor/github.com/mattermost/mattermost/server/public/model/onboarding.go new file mode 100644 index 00000000..ea80bbff --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/onboarding.go @@ -0,0 +1,32 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "io" +) + +// CompleteOnboardingRequest describes parameters of the requested plugin. +type CompleteOnboardingRequest struct { + Organization string `json:"organization"` // Organization is the name of the organization + InstallPlugins []string `json:"install_plugins"` // InstallPlugins is a list of plugins to be installed +} + +func (r *CompleteOnboardingRequest) Auditable() map[string]any { + return map[string]any{ + "install_plugins": r.InstallPlugins, + } +} + +// CompleteOnboardingRequest decodes a json-encoded request from the given io.Reader. +func CompleteOnboardingRequestFromReader(reader io.Reader) (*CompleteOnboardingRequest, error) { + var r *CompleteOnboardingRequest + err := json.NewDecoder(reader).Decode(&r) + if err != nil { + return nil, err + } + + return r, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/outgoing_oauth_connection.go b/vendor/github.com/mattermost/mattermost/server/public/model/outgoing_oauth_connection.go new file mode 100644 index 00000000..2c1363cd --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/outgoing_oauth_connection.go @@ -0,0 +1,230 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" + "net/url" + "unicode/utf8" +) + +type OutgoingOAuthConnectionGrantType string + +func (gt OutgoingOAuthConnectionGrantType) IsValid() bool { + return gt == OutgoingOAuthConnectionGrantTypeClientCredentials || gt == OutgoingOAuthConnectionGrantTypePassword +} + +const ( + OutgoingOAuthConnectionGrantTypeClientCredentials OutgoingOAuthConnectionGrantType = "client_credentials" + OutgoingOAuthConnectionGrantTypePassword OutgoingOAuthConnectionGrantType = "password" + + defaultGetConnectionsLimit = 50 +) + +type OutgoingOAuthConnection struct { + Id string `json:"id"` + CreatorId string `json:"creator_id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + Name string `json:"name"` + ClientId string `json:"client_id,omitempty"` + ClientSecret string `json:"client_secret,omitempty"` + CredentialsUsername *string `json:"credentials_username,omitempty"` + CredentialsPassword *string `json:"credentials_password,omitempty"` + OAuthTokenURL string `json:"oauth_token_url"` + GrantType OutgoingOAuthConnectionGrantType `json:"grant_type"` + Audiences StringArray `json:"audiences"` +} + +func (oa *OutgoingOAuthConnection) Auditable() map[string]any { + return map[string]any{ + "id": oa.Id, + "creator_id": oa.CreatorId, + "create_at": oa.CreateAt, + "update_at": oa.UpdateAt, + "name": oa.Name, + "grant_type": oa.GrantType, + } +} + +// Sanitize removes any sensitive fields from the OutgoingOAuthConnection object. +func (oa *OutgoingOAuthConnection) Sanitize() { + oa.ClientSecret = "" + oa.CredentialsPassword = nil +} + +// Patch updates the OutgoingOAuthConnection object with the non-empty fields from the given connection. 
+func (oa *OutgoingOAuthConnection) Patch(conn *OutgoingOAuthConnection) { + if conn == nil { + return + } + + if conn.Name != "" { + oa.Name = conn.Name + } + if conn.ClientId != "" { + oa.ClientId = conn.ClientId + } + if conn.ClientSecret != "" { + oa.ClientSecret = conn.ClientSecret + } + if conn.OAuthTokenURL != "" { + oa.OAuthTokenURL = conn.OAuthTokenURL + } + if conn.GrantType != "" { + oa.GrantType = conn.GrantType + } + if len(conn.Audiences) > 0 { + oa.Audiences = conn.Audiences + } + if conn.CredentialsUsername != nil { + oa.CredentialsUsername = conn.CredentialsUsername + } + if conn.CredentialsPassword != nil { + oa.CredentialsPassword = conn.CredentialsPassword + } +} + +// IsValid validates the object and returns an error if it isn't properly configured +func (oa *OutgoingOAuthConnection) IsValid() *AppError { + if !IsValidId(oa.Id) { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.id.error", nil, "", http.StatusBadRequest) + } + + if oa.CreateAt == 0 { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.create_at.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if oa.UpdateAt == 0 { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.update_at.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if !IsValidId(oa.CreatorId) { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.creator_id.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if oa.Name == "" || utf8.RuneCountInString(oa.Name) > 64 { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.name.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if oa.ClientId == "" || utf8.RuneCountInString(oa.ClientId) > 255 { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.client_id.error", nil, 
"id="+oa.Id, http.StatusBadRequest) + } + + if oa.ClientSecret == "" || utf8.RuneCountInString(oa.ClientSecret) > 255 { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.client_secret.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if !IsValidHTTPURL(oa.OAuthTokenURL) || utf8.RuneCountInString(oa.OAuthTokenURL) > 256 { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.oauth_token_url.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if err := oa.HasValidGrantType(); err != nil { + return err + } + + if len(oa.Audiences) == 0 { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.audience.empty", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if len(oa.Audiences) > 0 { + for _, audience := range oa.Audiences { + if !IsValidHTTPURL(audience) { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.audience.error", map[string]any{"Url": audience}, "id="+oa.Id, http.StatusBadRequest) + } + } + } + + return nil +} + +// HasValidGrantType validates the grant type and its parameters returning an error if it isn't properly configured +func (oa *OutgoingOAuthConnection) HasValidGrantType() *AppError { + if !oa.GrantType.IsValid() { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.grant_type.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if oa.GrantType == OutgoingOAuthConnectionGrantTypePassword && (oa.CredentialsUsername == nil || oa.CredentialsPassword == nil) { + return NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.password_credentials.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + if oa.GrantType == OutgoingOAuthConnectionGrantTypePassword && (*oa.CredentialsUsername == "" || *oa.CredentialsPassword == "") { + return 
NewAppError("OutgoingOAuthConnection.IsValid", "model.outgoing_oauth_connection.is_valid.password_credentials.error", nil, "id="+oa.Id, http.StatusBadRequest) + } + + return nil +} + +// PreSave will set the Id if empty, ensuring the object has one and the create/update times. +func (oa *OutgoingOAuthConnection) PreSave() { + if oa.Id == "" { + oa.Id = NewId() + } + + oa.CreateAt = GetMillis() + oa.UpdateAt = oa.CreateAt +} + +// PreUpdate will set the update time to now. +func (oa *OutgoingOAuthConnection) PreUpdate() { + oa.UpdateAt = GetMillis() +} + +// Etag returns the ETag for the cache. +func (oa *OutgoingOAuthConnection) Etag() string { + return Etag(oa.Id, oa.UpdateAt) +} + +// OutgoingOAuthConnectionGetConnectionsFilter is used to filter outgoing connections +type OutgoingOAuthConnectionGetConnectionsFilter struct { + OffsetId string + Limit int + Audience string + + // TeamId is not used as a filter but as a way to check if the current user has permission to + // access the outgoing oauth connection for the given team in order to use them in the slash + // commands and outgoing webhooks. 
+ TeamId string +} + +// SetDefaults sets the default values for the filter +func (oaf *OutgoingOAuthConnectionGetConnectionsFilter) SetDefaults() { + if oaf.Limit == 0 { + oaf.Limit = defaultGetConnectionsLimit + } +} + +// ToURLValues converts the filter to url.Values +func (oaf *OutgoingOAuthConnectionGetConnectionsFilter) ToURLValues() url.Values { + v := url.Values{} + + if oaf.Limit > 0 { + v.Set("limit", fmt.Sprintf("%d", oaf.Limit)) + } + + if oaf.OffsetId != "" { + v.Set("offset_id", oaf.OffsetId) + } + + if oaf.Audience != "" { + v.Set("audience", oaf.Audience) + } + + if oaf.TeamId != "" { + v.Set("team_id", oaf.TeamId) + } + return v +} + +// OutgoingOAuthConnectionToken is used to return the token for an outgoing connection oauth +// authentication request +type OutgoingOAuthConnectionToken struct { + AccessToken string + TokenType string +} + +func (ooct *OutgoingOAuthConnectionToken) AsHeaderValue() string { + return ooct.TokenType + " " + ooct.AccessToken +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/outgoing_webhook.go b/vendor/github.com/mattermost/mattermost/server/public/model/outgoing_webhook.go new file mode 100644 index 00000000..907a8c1d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/outgoing_webhook.go @@ -0,0 +1,237 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" + "net/url" + "slices" + "strconv" + "strings" +) + +type OutgoingWebhook struct { + Id string `json:"id"` + Token string `json:"token"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + CreatorId string `json:"creator_id"` + ChannelId string `json:"channel_id"` + TeamId string `json:"team_id"` + TriggerWords StringArray `json:"trigger_words"` + TriggerWhen int `json:"trigger_when"` + CallbackURLs StringArray `json:"callback_urls"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + ContentType string `json:"content_type"` + Username string `json:"username"` + IconURL string `json:"icon_url"` +} + +func (o *OutgoingWebhook) Auditable() map[string]any { + return map[string]any{ + "id": o.Id, + "create_at": o.CreateAt, + "update_at": o.UpdateAt, + "delete_at": o.DeleteAt, + "creator_id": o.CreatorId, + "channel_id": o.ChannelId, + "team_id": o.TeamId, + "trigger_words": o.TriggerWords, + "trigger_when": o.TriggerWhen, + "callback_urls": o.CallbackURLs, + "display_name": o.DisplayName, + "description": o.Description, + "content_type": o.ContentType, + "username": o.Username, + "icon_url": o.IconURL, + } +} + +type OutgoingWebhookPayload struct { + Token string `json:"token"` + TeamId string `json:"team_id"` + TeamDomain string `json:"team_domain"` + ChannelId string `json:"channel_id"` + ChannelName string `json:"channel_name"` + Timestamp int64 `json:"timestamp"` + UserId string `json:"user_id"` + UserName string `json:"user_name"` + PostId string `json:"post_id"` + Text string `json:"text"` + TriggerWord string `json:"trigger_word"` + FileIds string `json:"file_ids"` +} + +type OutgoingWebhookResponse struct { + Text *string `json:"text"` + Username string `json:"username"` + IconURL string `json:"icon_url"` + Props StringInterface `json:"props"` + Attachments []*SlackAttachment `json:"attachments"` + Type string 
`json:"type"` + ResponseType string `json:"response_type"` + Priority *PostPriority `json:"priority"` +} + +const OutgoingHookResponseTypeComment = "comment" + +func (o *OutgoingWebhookPayload) ToFormValues() string { + v := url.Values{} + v.Set("token", o.Token) + v.Set("team_id", o.TeamId) + v.Set("team_domain", o.TeamDomain) + v.Set("channel_id", o.ChannelId) + v.Set("channel_name", o.ChannelName) + v.Set("timestamp", strconv.FormatInt(o.Timestamp/1000, 10)) + v.Set("user_id", o.UserId) + v.Set("user_name", o.UserName) + v.Set("post_id", o.PostId) + v.Set("text", o.Text) + v.Set("trigger_word", o.TriggerWord) + v.Set("file_ids", o.FileIds) + + return v.Encode() +} + +func (o *OutgoingWebhook) IsValid() *AppError { + if !IsValidId(o.Id) { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Token) != 26 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.token.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.create_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.UpdateAt == 0 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.update_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !IsValidId(o.CreatorId) { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if o.ChannelId != "" && !IsValidId(o.ChannelId) { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.channel_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.TeamId) { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.team_id.app_error", nil, "", http.StatusBadRequest) + } + + if len(fmt.Sprintf("%s", o.TriggerWords)) > 1024 { + return 
NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.words.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.TriggerWords) != 0 { + if slices.Contains(o.TriggerWords, "") { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.trigger_words.app_error", nil, "", http.StatusBadRequest) + } + } + + if len(o.CallbackURLs) == 0 || len(fmt.Sprintf("%s", o.CallbackURLs)) > 1024 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.callback.app_error", nil, "", http.StatusBadRequest) + } + + for _, callback := range o.CallbackURLs { + if !IsValidHTTPURL(callback) { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.url.app_error", nil, "", http.StatusBadRequest) + } + } + + if len(o.DisplayName) > 64 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.display_name.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Description) > 500 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.description.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.ContentType) > 128 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.content_type.app_error", nil, "", http.StatusBadRequest) + } + + if o.TriggerWhen > 1 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.content_type.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Username) > 64 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.username.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.IconURL) > 1024 { + return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.icon_url.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (o *OutgoingWebhook) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + if o.Token == "" { + o.Token = NewId() + } + + o.CreateAt = GetMillis() + o.UpdateAt = o.CreateAt +} + +func (o 
*OutgoingWebhook) PreUpdate() { + o.UpdateAt = GetMillis() +} + +func (o *OutgoingWebhook) TriggerWordExactMatch(word string) bool { + if word == "" { + return false + } + + return slices.Contains(o.TriggerWords, word) +} + +func (o *OutgoingWebhook) TriggerWordStartsWith(word string) bool { + if word == "" { + return false + } + + for _, trigger := range o.TriggerWords { + if strings.HasPrefix(word, trigger) { + return true + } + } + + return false +} + +func (o *OutgoingWebhook) GetTriggerWord(word string, isExactMatch bool) (triggerWord string) { + if word == "" { + return + } + + if isExactMatch { + for _, trigger := range o.TriggerWords { + if trigger == word { + triggerWord = trigger + break + } + } + } else { + for _, trigger := range o.TriggerWords { + if strings.HasPrefix(word, trigger) { + triggerWord = trigger + break + } + } + } + + return triggerWord +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/packet_metadata.go b/vendor/github.com/mattermost/mattermost/server/public/model/packet_metadata.go new file mode 100644 index 00000000..e2a2215e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/packet_metadata.go @@ -0,0 +1,131 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + + "github.com/blang/semver/v4" + "gopkg.in/yaml.v3" +) + +type PacketType string + +const ( + CurrentMetadataVersion int = 1 + SupportPacketType PacketType = "support-packet" + PluginPacketType PacketType = "plugin-packet" + + PacketMetadataFileName = "metadata.yaml" +) + +// PacketMetadata contains information about the server and the configured license (if there is one), +// It's used in file archives, so called Packets, that customer send to Mattermost Staff for review. +// For example, this metadata is attached to the Support Packet and the Metrics plugin Packet. 
+type PacketMetadata struct { + // Required Fields + + Version int `yaml:"version"` + Type PacketType `yaml:"type"` + GeneratedAt int64 `yaml:"generated_at"` + ServerVersion string `yaml:"server_version"` + ServerID string `yaml:"server_id"` + + // Optional Fields + + LicenseID string `yaml:"license_id"` + CustomerID string `yaml:"customer_id"` + Extras map[string]any `yaml:"extras,omitempty"` +} + +func (md *PacketMetadata) Validate() error { + if md.Version < 1 { + return fmt.Errorf("metadata version should be greater than 1") + } + + switch md.Type { + case SupportPacketType, PluginPacketType: + default: + return fmt.Errorf("unrecognized packet type: %s", md.Type) + } + + if md.GeneratedAt <= 0 { + return fmt.Errorf("generated_at should be a positive number") + } + + if _, err := semver.ParseTolerant(md.ServerVersion); err != nil { + return fmt.Errorf("could not parse server version: %w", err) + } + + if !IsValidId(md.ServerID) { + return fmt.Errorf("server id is not a valid id %q", md.ServerID) + } + + if md.LicenseID != "" && !IsValidId(md.LicenseID) { + return fmt.Errorf("license id is not a valid id %q", md.LicenseID) + } + + if md.CustomerID != "" && !IsValidId(md.CustomerID) { + return fmt.Errorf("customer id is not a valid id %q", md.CustomerID) + } + + return nil +} + +func ParsePacketMetadata(b []byte) (*PacketMetadata, error) { + v := struct { + Version int `yaml:"version"` + }{} + + err := yaml.Unmarshal(b, &v) + if err != nil { + return nil, err + } + + switch v.Version { + case 1: + var md PacketMetadata + err = yaml.Unmarshal(b, &md) + if err != nil { + return nil, err + } + + err = md.Validate() + if err != nil { + return nil, err + } + + return &md, nil + default: + return nil, fmt.Errorf("unsupported metadata version: %d", v.Version) + } +} + +// GeneratePacketMetadata is a utility function to generate metadata for customer provided Packets. +// It will construct it from a Packet Type, the telemetryID and optionally a license. 
+func GeneratePacketMetadata(t PacketType, telemetryID string, license *License, extra map[string]any) (*PacketMetadata, error) { + if extra == nil { + extra = make(map[string]any) + } + + md := &PacketMetadata{ + Version: CurrentMetadataVersion, + Type: t, + GeneratedAt: GetMillis(), + ServerVersion: CurrentVersion, + ServerID: telemetryID, + Extras: extra, + } + + if license != nil { + md.LicenseID = license.Id + md.CustomerID = license.Customer.Id + } + + if err := md.Validate(); err != nil { + return nil, fmt.Errorf("invalid metadata: %w", err) + } + + return md, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/permalink.go b/vendor/github.com/mattermost/mattermost/server/public/model/permalink.go new file mode 100644 index 00000000..12645646 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/permalink.go @@ -0,0 +1,31 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type Permalink struct { + PreviewPost *PreviewPost `json:"preview_post"` +} + +type PreviewPost struct { + PostID string `json:"post_id"` + Post *Post `json:"post"` + TeamName string `json:"team_name"` + ChannelDisplayName string `json:"channel_display_name"` + ChannelType ChannelType `json:"channel_type"` + ChannelID string `json:"channel_id"` +} + +func NewPreviewPost(post *Post, team *Team, channel *Channel) *PreviewPost { + if post == nil { + return nil + } + return &PreviewPost{ + PostID: post.Id, + Post: post, + TeamName: team.Name, + ChannelDisplayName: channel.DisplayName, + ChannelType: channel.Type, + ChannelID: channel.Id, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/permission.go b/vendor/github.com/mattermost/mattermost/server/public/model/permission.go new file mode 100644 index 00000000..2393c0c2 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/permission.go @@ -0,0 +1,2706 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" + "strings" +) + +const ( + PermissionScopeSystem = "system_scope" + PermissionScopeTeam = "team_scope" + PermissionScopeChannel = "channel_scope" + PermissionScopeGroup = "group_scope" + PermissionScopePlaybook = "playbook_scope" + PermissionScopeRun = "run_scope" +) + +type Permission struct { + Id string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Scope string `json:"scope"` +} + +var PermissionInviteUser *Permission +var PermissionAddUserToTeam *Permission + +// Deprecated: PermissionUseSlashCommands is not longer used. It's only kept for backwards compatibility. +// See https://mattermost.atlassian.net/browse/MM-52574 for more details. 
+var PermissionUseSlashCommands *Permission +var PermissionManageSlashCommands *Permission +var PermissionManageOthersSlashCommands *Permission +var PermissionCreatePublicChannel *Permission +var PermissionCreatePrivateChannel *Permission +var PermissionManagePublicChannelMembers *Permission +var PermissionManagePrivateChannelMembers *Permission +var PermissionConvertPublicChannelToPrivate *Permission +var PermissionConvertPrivateChannelToPublic *Permission +var PermissionAssignSystemAdminRole *Permission +var PermissionManageRoles *Permission +var PermissionManageTeamRoles *Permission +var PermissionManageChannelRoles *Permission +var PermissionCreateDirectChannel *Permission +var PermissionCreateGroupChannel *Permission +var PermissionManagePublicChannelProperties *Permission +var PermissionManagePrivateChannelProperties *Permission +var PermissionListPublicTeams *Permission +var PermissionJoinPublicTeams *Permission +var PermissionListPrivateTeams *Permission +var PermissionJoinPrivateTeams *Permission +var PermissionListTeamChannels *Permission +var PermissionJoinPublicChannels *Permission +var PermissionDeletePublicChannel *Permission +var PermissionDeletePrivateChannel *Permission +var PermissionEditOtherUsers *Permission +var PermissionReadChannel *Permission +var PermissionReadChannelContent *Permission +var PermissionReadPublicChannelGroups *Permission +var PermissionReadPrivateChannelGroups *Permission +var PermissionReadPublicChannel *Permission +var PermissionAddReaction *Permission +var PermissionRemoveReaction *Permission +var PermissionRemoveOthersReactions *Permission +var PermissionPermanentDeleteUser *Permission +var PermissionUploadFile *Permission +var PermissionGetPublicLink *Permission +var PermissionManageWebhooks *Permission +var PermissionManageOthersWebhooks *Permission +var PermissionManageIncomingWebhooks *Permission +var PermissionManageOwnIncomingWebhooks *Permission +var PermissionManageOutgoingWebhooks *Permission +var 
PermissionManageOwnOutgoingWebhooks *Permission +var PermissionManageOthersIncomingWebhooks *Permission +var PermissionManageOthersOutgoingWebhooks *Permission +var PermissionManageOwnSlashCommands *Permission +var PermissionManageOAuth *Permission +var PermissionManageSystemWideOAuth *Permission +var PermissionBypassIncomingWebhookChannelLock *Permission +var PermissionManageEmojis *Permission +var PermissionManageOthersEmojis *Permission +var PermissionCreateEmojis *Permission +var PermissionDeleteEmojis *Permission +var PermissionDeleteOthersEmojis *Permission +var PermissionCreatePost *Permission +var PermissionCreatePostPublic *Permission +var PermissionCreatePostEphemeral *Permission +var PermissionReadDeletedPosts *Permission +var PermissionEditPost *Permission +var PermissionEditOthersPosts *Permission +var PermissionDeletePost *Permission +var PermissionDeleteOthersPosts *Permission +var PermissionRemoveUserFromTeam *Permission +var PermissionCreateTeam *Permission +var PermissionManageTeam *Permission +var PermissionImportTeam *Permission +var PermissionViewTeam *Permission +var PermissionListUsersWithoutTeam *Permission +var PermissionReadJobs *Permission +var PermissionManageJobs *Permission +var PermissionCreateUserAccessToken *Permission +var PermissionReadUserAccessToken *Permission +var PermissionRevokeUserAccessToken *Permission +var PermissionCreateBot *Permission +var PermissionAssignBot *Permission +var PermissionReadBots *Permission +var PermissionReadOthersBots *Permission +var PermissionManageBots *Permission +var PermissionManageOthersBots *Permission +var PermissionViewMembers *Permission +var PermissionInviteGuest *Permission +var PermissionPromoteGuest *Permission +var PermissionDemoteToGuest *Permission +var PermissionUseChannelMentions *Permission +var PermissionUseGroupMentions *Permission +var PermissionAddBookmarkPublicChannel *Permission +var PermissionEditBookmarkPublicChannel *Permission +var PermissionDeleteBookmarkPublicChannel 
*Permission +var PermissionOrderBookmarkPublicChannel *Permission +var PermissionAddBookmarkPrivateChannel *Permission +var PermissionEditBookmarkPrivateChannel *Permission +var PermissionDeleteBookmarkPrivateChannel *Permission +var PermissionOrderBookmarkPrivateChannel *Permission +var PermissionReadOtherUsersTeams *Permission +var PermissionEditBrand *Permission +var PermissionManageSharedChannels *Permission +var PermissionManageSecureConnections *Permission +var PermissionDownloadComplianceExportResult *Permission +var PermissionCreateDataRetentionJob *Permission +var PermissionManageDataRetentionJob *Permission +var PermissionReadDataRetentionJob *Permission +var PermissionCreateComplianceExportJob *Permission +var PermissionManageComplianceExportJob *Permission +var PermissionReadComplianceExportJob *Permission +var PermissionReadAudits *Permission +var PermissionTestElasticsearch *Permission +var PermissionTestSiteURL *Permission +var PermissionTestS3 *Permission +var PermissionReloadConfig *Permission +var PermissionInvalidateCaches *Permission +var PermissionRecycleDatabaseConnections *Permission +var PermissionPurgeElasticsearchIndexes *Permission +var PermissionTestEmail *Permission +var PermissionCreateElasticsearchPostIndexingJob *Permission +var PermissionManageElasticsearchPostIndexingJob *Permission +var PermissionCreateElasticsearchPostAggregationJob *Permission +var PermissionManageElasticsearchPostAggregationJob *Permission +var PermissionReadElasticsearchPostIndexingJob *Permission +var PermissionReadElasticsearchPostAggregationJob *Permission +var PermissionPurgeBleveIndexes *Permission +var PermissionCreatePostBleveIndexesJob *Permission +var PermissionManagePostBleveIndexesJob *Permission +var PermissionCreateLdapSyncJob *Permission +var PermissionManageLdapSyncJob *Permission +var PermissionReadLdapSyncJob *Permission +var PermissionTestLdap *Permission +var PermissionInvalidateEmailInvite *Permission +var PermissionGetSamlMetadataFromIdp 
*Permission +var PermissionAddSamlPublicCert *Permission +var PermissionAddSamlPrivateCert *Permission +var PermissionAddSamlIdpCert *Permission +var PermissionRemoveSamlPublicCert *Permission +var PermissionRemoveSamlPrivateCert *Permission +var PermissionRemoveSamlIdpCert *Permission +var PermissionGetSamlCertStatus *Permission +var PermissionAddLdapPublicCert *Permission +var PermissionAddLdapPrivateCert *Permission +var PermissionRemoveLdapPublicCert *Permission +var PermissionRemoveLdapPrivateCert *Permission +var PermissionGetLogs *Permission +var PermissionGetAnalytics *Permission +var PermissionReadLicenseInformation *Permission +var PermissionManageLicenseInformation *Permission +var PermissionManagePublicChannelBanner *Permission +var PermissionManagePrivateChannelBanner *Permission +var PermissionManageChannelAccessRules *Permission + +var PermissionSysconsoleReadAbout *Permission +var PermissionSysconsoleWriteAbout *Permission + +var PermissionSysconsoleReadAboutEditionAndLicense *Permission +var PermissionSysconsoleWriteAboutEditionAndLicense *Permission + +var PermissionSysconsoleReadBilling *Permission +var PermissionSysconsoleWriteBilling *Permission + +var PermissionSysconsoleReadReporting *Permission +var PermissionSysconsoleWriteReporting *Permission + +var PermissionSysconsoleReadReportingSiteStatistics *Permission +var PermissionSysconsoleWriteReportingSiteStatistics *Permission + +var PermissionSysconsoleReadReportingTeamStatistics *Permission +var PermissionSysconsoleWriteReportingTeamStatistics *Permission + +var PermissionSysconsoleReadReportingServerLogs *Permission +var PermissionSysconsoleWriteReportingServerLogs *Permission + +var PermissionSysconsoleReadUserManagementUsers *Permission +var PermissionSysconsoleWriteUserManagementUsers *Permission + +var PermissionSysconsoleReadUserManagementGroups *Permission +var PermissionSysconsoleWriteUserManagementGroups *Permission + +var PermissionSysconsoleReadUserManagementTeams *Permission 
+var PermissionSysconsoleWriteUserManagementTeams *Permission + +var PermissionSysconsoleReadUserManagementChannels *Permission +var PermissionSysconsoleWriteUserManagementChannels *Permission + +var PermissionSysconsoleReadUserManagementPermissions *Permission +var PermissionSysconsoleWriteUserManagementPermissions *Permission + +var PermissionSysconsoleReadUserManagementSystemRoles *Permission +var PermissionSysconsoleWriteUserManagementSystemRoles *Permission + +// DEPRECATED +var PermissionSysconsoleReadEnvironment *Permission + +// DEPRECATED +var PermissionSysconsoleWriteEnvironment *Permission + +var PermissionSysconsoleReadEnvironmentWebServer *Permission +var PermissionSysconsoleWriteEnvironmentWebServer *Permission + +var PermissionSysconsoleReadEnvironmentDatabase *Permission +var PermissionSysconsoleWriteEnvironmentDatabase *Permission + +var PermissionSysconsoleReadEnvironmentElasticsearch *Permission +var PermissionSysconsoleWriteEnvironmentElasticsearch *Permission + +var PermissionSysconsoleReadEnvironmentFileStorage *Permission +var PermissionSysconsoleWriteEnvironmentFileStorage *Permission + +var PermissionSysconsoleReadEnvironmentImageProxy *Permission +var PermissionSysconsoleWriteEnvironmentImageProxy *Permission + +var PermissionSysconsoleReadEnvironmentSMTP *Permission +var PermissionSysconsoleWriteEnvironmentSMTP *Permission + +var PermissionSysconsoleReadEnvironmentPushNotificationServer *Permission +var PermissionSysconsoleWriteEnvironmentPushNotificationServer *Permission + +var PermissionSysconsoleReadEnvironmentHighAvailability *Permission +var PermissionSysconsoleWriteEnvironmentHighAvailability *Permission + +var PermissionSysconsoleReadEnvironmentRateLimiting *Permission +var PermissionSysconsoleWriteEnvironmentRateLimiting *Permission + +var PermissionSysconsoleReadEnvironmentLogging *Permission +var PermissionSysconsoleWriteEnvironmentLogging *Permission + +var PermissionSysconsoleReadEnvironmentSessionLengths *Permission +var 
PermissionSysconsoleWriteEnvironmentSessionLengths *Permission + +var PermissionSysconsoleReadEnvironmentPerformanceMonitoring *Permission +var PermissionSysconsoleWriteEnvironmentPerformanceMonitoring *Permission + +var PermissionSysconsoleReadEnvironmentDeveloper *Permission +var PermissionSysconsoleWriteEnvironmentDeveloper *Permission + +var PermissionSysconsoleReadEnvironmentMobileSecurity *Permission +var PermissionSysconsoleWriteEnvironmentMobileSecurity *Permission + +var PermissionSysconsoleReadSite *Permission +var PermissionSysconsoleWriteSite *Permission + +var PermissionSysconsoleReadSiteCustomization *Permission +var PermissionSysconsoleWriteSiteCustomization *Permission + +var PermissionSysconsoleReadSiteLocalization *Permission +var PermissionSysconsoleWriteSiteLocalization *Permission + +var PermissionSysconsoleReadSiteUsersAndTeams *Permission +var PermissionSysconsoleWriteSiteUsersAndTeams *Permission + +var PermissionSysconsoleReadSiteNotifications *Permission +var PermissionSysconsoleWriteSiteNotifications *Permission + +var PermissionSysconsoleReadSiteAnnouncementBanner *Permission +var PermissionSysconsoleWriteSiteAnnouncementBanner *Permission + +var PermissionSysconsoleReadSiteEmoji *Permission +var PermissionSysconsoleWriteSiteEmoji *Permission + +var PermissionSysconsoleReadSitePosts *Permission +var PermissionSysconsoleWriteSitePosts *Permission + +var PermissionSysconsoleReadSiteFileSharingAndDownloads *Permission +var PermissionSysconsoleWriteSiteFileSharingAndDownloads *Permission + +var PermissionSysconsoleReadSitePublicLinks *Permission +var PermissionSysconsoleWriteSitePublicLinks *Permission + +var PermissionSysconsoleReadSiteNotices *Permission +var PermissionSysconsoleWriteSiteNotices *Permission + +var PermissionSysconsoleReadIPFilters *Permission +var PermissionSysconsoleWriteIPFilters *Permission + +var PermissionSysconsoleReadAuthentication *Permission +var PermissionSysconsoleWriteAuthentication *Permission + +var 
PermissionSysconsoleReadAuthenticationSignup *Permission +var PermissionSysconsoleWriteAuthenticationSignup *Permission + +var PermissionSysconsoleReadAuthenticationEmail *Permission +var PermissionSysconsoleWriteAuthenticationEmail *Permission + +var PermissionSysconsoleReadAuthenticationPassword *Permission +var PermissionSysconsoleWriteAuthenticationPassword *Permission + +var PermissionSysconsoleReadAuthenticationMfa *Permission +var PermissionSysconsoleWriteAuthenticationMfa *Permission + +var PermissionSysconsoleReadAuthenticationLdap *Permission +var PermissionSysconsoleWriteAuthenticationLdap *Permission + +var PermissionSysconsoleReadAuthenticationSaml *Permission +var PermissionSysconsoleWriteAuthenticationSaml *Permission + +var PermissionSysconsoleReadAuthenticationOpenid *Permission +var PermissionSysconsoleWriteAuthenticationOpenid *Permission + +var PermissionSysconsoleReadAuthenticationGuestAccess *Permission +var PermissionSysconsoleWriteAuthenticationGuestAccess *Permission + +var PermissionSysconsoleReadPlugins *Permission +var PermissionSysconsoleWritePlugins *Permission + +var PermissionSysconsoleReadIntegrations *Permission +var PermissionSysconsoleWriteIntegrations *Permission + +var PermissionSysconsoleReadIntegrationsIntegrationManagement *Permission +var PermissionSysconsoleWriteIntegrationsIntegrationManagement *Permission + +var PermissionSysconsoleReadIntegrationsBotAccounts *Permission +var PermissionSysconsoleWriteIntegrationsBotAccounts *Permission + +var PermissionSysconsoleReadIntegrationsGif *Permission +var PermissionSysconsoleWriteIntegrationsGif *Permission + +var PermissionSysconsoleReadIntegrationsCors *Permission +var PermissionSysconsoleWriteIntegrationsCors *Permission + +var PermissionSysconsoleReadCompliance *Permission +var PermissionSysconsoleWriteCompliance *Permission + +var PermissionSysconsoleReadComplianceDataRetentionPolicy *Permission +var PermissionSysconsoleWriteComplianceDataRetentionPolicy *Permission + +var 
PermissionSysconsoleReadComplianceComplianceExport *Permission +var PermissionSysconsoleWriteComplianceComplianceExport *Permission + +var PermissionSysconsoleReadComplianceComplianceMonitoring *Permission +var PermissionSysconsoleWriteComplianceComplianceMonitoring *Permission + +var PermissionSysconsoleReadComplianceCustomTermsOfService *Permission +var PermissionSysconsoleWriteComplianceCustomTermsOfService *Permission + +var PermissionSysconsoleReadExperimental *Permission +var PermissionSysconsoleWriteExperimental *Permission + +var PermissionSysconsoleReadExperimentalFeatures *Permission +var PermissionSysconsoleWriteExperimentalFeatures *Permission + +var PermissionSysconsoleReadExperimentalFeatureFlags *Permission +var PermissionSysconsoleWriteExperimentalFeatureFlags *Permission + +var PermissionSysconsoleReadExperimentalBleve *Permission +var PermissionSysconsoleWriteExperimentalBleve *Permission + +var PermissionPublicPlaybookCreate *Permission +var PermissionPublicPlaybookManageProperties *Permission +var PermissionPublicPlaybookManageMembers *Permission +var PermissionPublicPlaybookManageRoles *Permission +var PermissionPublicPlaybookView *Permission +var PermissionPublicPlaybookMakePrivate *Permission + +var PermissionPrivatePlaybookCreate *Permission +var PermissionPrivatePlaybookManageProperties *Permission +var PermissionPrivatePlaybookManageMembers *Permission +var PermissionPrivatePlaybookManageRoles *Permission +var PermissionPrivatePlaybookView *Permission +var PermissionPrivatePlaybookMakePublic *Permission + +var PermissionRunCreate *Permission +var PermissionRunManageProperties *Permission +var PermissionRunManageMembers *Permission +var PermissionRunView *Permission + +var PermissionSysconsoleReadProductsBoards *Permission +var PermissionSysconsoleWriteProductsBoards *Permission + +// PermissionManageSystem is a general permission that encompasses all system admin functions +// in the future this could be broken up to allow access to some 
+// admin functions but not others +var PermissionManageSystem *Permission + +var PermissionCreateCustomGroup *Permission +var PermissionManageCustomGroupMembers *Permission +var PermissionEditCustomGroup *Permission +var PermissionDeleteCustomGroup *Permission +var PermissionRestoreCustomGroup *Permission + +var AllPermissions []*Permission +var DeprecatedPermissions []*Permission + +var ChannelModeratedPermissions []string +var ChannelModeratedPermissionsMap map[string]string + +var SysconsoleReadPermissions []*Permission +var SysconsoleWritePermissions []*Permission + +var PermissionManageOutgoingOAuthConnections *Permission +var ModeratedBookmarkPermissions []*Permission + +func initializePermissions() { + PermissionInviteUser = &Permission{ + "invite_user", + "authentication.permissions.team_invite_user.name", + "authentication.permissions.team_invite_user.description", + PermissionScopeTeam, + } + PermissionAddUserToTeam = &Permission{ + "add_user_to_team", + "authentication.permissions.add_user_to_team.name", + "authentication.permissions.add_user_to_team.description", + PermissionScopeTeam, + } + PermissionUseSlashCommands = &Permission{ + "use_slash_commands", + "authentication.permissions.team_use_slash_commands.name", + "authentication.permissions.team_use_slash_commands.description", + PermissionScopeChannel, + } + // DEPRECATED - use PermissionManageOwnSlashCommands instead + PermissionManageSlashCommands = &Permission{ + "manage_slash_commands", + "authentication.permissions.manage_slash_commands.name", + "authentication.permissions.manage_slash_commands.description", + PermissionScopeTeam, + } + PermissionManageOwnSlashCommands = &Permission{ + "manage_own_slash_commands", + "authentication.permissions.manage_own_slash_commands.name", + "authentication.permissions.manage_own_slash_commands.description", + PermissionScopeTeam, + } + PermissionManageOthersSlashCommands = &Permission{ + "manage_others_slash_commands", + 
"authentication.permissions.manage_others_slash_commands.name", + "authentication.permissions.manage_others_slash_commands.description", + PermissionScopeTeam, + } + PermissionCreatePublicChannel = &Permission{ + "create_public_channel", + "authentication.permissions.create_public_channel.name", + "authentication.permissions.create_public_channel.description", + PermissionScopeTeam, + } + PermissionCreatePrivateChannel = &Permission{ + "create_private_channel", + "authentication.permissions.create_private_channel.name", + "authentication.permissions.create_private_channel.description", + PermissionScopeTeam, + } + PermissionManagePublicChannelMembers = &Permission{ + "manage_public_channel_members", + "authentication.permissions.manage_public_channel_members.name", + "authentication.permissions.manage_public_channel_members.description", + PermissionScopeChannel, + } + PermissionManagePrivateChannelMembers = &Permission{ + "manage_private_channel_members", + "authentication.permissions.manage_private_channel_members.name", + "authentication.permissions.manage_private_channel_members.description", + PermissionScopeChannel, + } + PermissionConvertPublicChannelToPrivate = &Permission{ + "convert_public_channel_to_private", + "authentication.permissions.convert_public_channel_to_private.name", + "authentication.permissions.convert_public_channel_to_private.description", + PermissionScopeChannel, + } + PermissionConvertPrivateChannelToPublic = &Permission{ + "convert_private_channel_to_public", + "authentication.permissions.convert_private_channel_to_public.name", + "authentication.permissions.convert_private_channel_to_public.description", + PermissionScopeChannel, + } + PermissionAssignSystemAdminRole = &Permission{ + "assign_system_admin_role", + "authentication.permissions.assign_system_admin_role.name", + "authentication.permissions.assign_system_admin_role.description", + PermissionScopeSystem, + } + PermissionManageRoles = &Permission{ + "manage_roles", + 
"authentication.permissions.manage_roles.name", + "authentication.permissions.manage_roles.description", + PermissionScopeSystem, + } + PermissionManageTeamRoles = &Permission{ + "manage_team_roles", + "authentication.permissions.manage_team_roles.name", + "authentication.permissions.manage_team_roles.description", + PermissionScopeTeam, + } + PermissionManageChannelRoles = &Permission{ + "manage_channel_roles", + "authentication.permissions.manage_channel_roles.name", + "authentication.permissions.manage_channel_roles.description", + PermissionScopeChannel, + } + PermissionManageSystem = &Permission{ + "manage_system", + "authentication.permissions.manage_system.name", + "authentication.permissions.manage_system.description", + PermissionScopeSystem, + } + PermissionCreateDirectChannel = &Permission{ + "create_direct_channel", + "authentication.permissions.create_direct_channel.name", + "authentication.permissions.create_direct_channel.description", + PermissionScopeSystem, + } + PermissionCreateGroupChannel = &Permission{ + "create_group_channel", + "authentication.permissions.create_group_channel.name", + "authentication.permissions.create_group_channel.description", + PermissionScopeSystem, + } + PermissionManagePublicChannelProperties = &Permission{ + "manage_public_channel_properties", + "authentication.permissions.manage_public_channel_properties.name", + "authentication.permissions.manage_public_channel_properties.description", + PermissionScopeChannel, + } + PermissionManagePrivateChannelProperties = &Permission{ + "manage_private_channel_properties", + "authentication.permissions.manage_private_channel_properties.name", + "authentication.permissions.manage_private_channel_properties.description", + PermissionScopeChannel, + } + PermissionListPublicTeams = &Permission{ + "list_public_teams", + "authentication.permissions.list_public_teams.name", + "authentication.permissions.list_public_teams.description", + PermissionScopeSystem, + } + 
PermissionJoinPublicTeams = &Permission{ + "join_public_teams", + "authentication.permissions.join_public_teams.name", + "authentication.permissions.join_public_teams.description", + PermissionScopeSystem, + } + PermissionListPrivateTeams = &Permission{ + "list_private_teams", + "authentication.permissions.list_private_teams.name", + "authentication.permissions.list_private_teams.description", + PermissionScopeSystem, + } + PermissionJoinPrivateTeams = &Permission{ + "join_private_teams", + "authentication.permissions.join_private_teams.name", + "authentication.permissions.join_private_teams.description", + PermissionScopeSystem, + } + PermissionListTeamChannels = &Permission{ + "list_team_channels", + "authentication.permissions.list_team_channels.name", + "authentication.permissions.list_team_channels.description", + PermissionScopeTeam, + } + PermissionJoinPublicChannels = &Permission{ + "join_public_channels", + "authentication.permissions.join_public_channels.name", + "authentication.permissions.join_public_channels.description", + PermissionScopeTeam, + } + PermissionDeletePublicChannel = &Permission{ + "delete_public_channel", + "authentication.permissions.delete_public_channel.name", + "authentication.permissions.delete_public_channel.description", + PermissionScopeChannel, + } + PermissionDeletePrivateChannel = &Permission{ + "delete_private_channel", + "authentication.permissions.delete_private_channel.name", + "authentication.permissions.delete_private_channel.description", + PermissionScopeChannel, + } + PermissionEditOtherUsers = &Permission{ + "edit_other_users", + "authentication.permissions.edit_other_users.name", + "authentication.permissions.edit_other_users.description", + PermissionScopeSystem, + } + PermissionReadChannel = &Permission{ + "read_channel", + "authentication.permissions.read_channel.name", + "authentication.permissions.read_channel.description", + PermissionScopeChannel, + } + PermissionReadChannelContent = &Permission{ + 
"read_channel_content", + "authentication.permissions.read_channel_content.name", + "authentication.permissions.read_channel_content.description", + PermissionScopeChannel, + } + PermissionReadPublicChannelGroups = &Permission{ + "read_public_channel_groups", + "authentication.permissions.read_public_channel_groups.name", + "authentication.permissions.read_public_channel_groups.description", + PermissionScopeChannel, + } + PermissionReadPrivateChannelGroups = &Permission{ + "read_private_channel_groups", + "authentication.permissions.read_private_channel_groups.name", + "authentication.permissions.read_private_channel_groups.description", + PermissionScopeChannel, + } + PermissionReadPublicChannel = &Permission{ + "read_public_channel", + "authentication.permissions.read_public_channel.name", + "authentication.permissions.read_public_channel.description", + PermissionScopeTeam, + } + PermissionAddReaction = &Permission{ + "add_reaction", + "authentication.permissions.add_reaction.name", + "authentication.permissions.add_reaction.description", + PermissionScopeChannel, + } + PermissionRemoveReaction = &Permission{ + "remove_reaction", + "authentication.permissions.remove_reaction.name", + "authentication.permissions.remove_reaction.description", + PermissionScopeChannel, + } + PermissionRemoveOthersReactions = &Permission{ + "remove_others_reactions", + "authentication.permissions.remove_others_reactions.name", + "authentication.permissions.remove_others_reactions.description", + PermissionScopeChannel, + } + // DEPRECATED + PermissionPermanentDeleteUser = &Permission{ + "permanent_delete_user", + "authentication.permissions.permanent_delete_user.name", + "authentication.permissions.permanent_delete_user.description", + PermissionScopeSystem, + } + PermissionUploadFile = &Permission{ + "upload_file", + "authentication.permissions.upload_file.name", + "authentication.permissions.upload_file.description", + PermissionScopeChannel, + } + PermissionGetPublicLink = 
&Permission{ + "get_public_link", + "authentication.permissions.get_public_link.name", + "authentication.permissions.get_public_link.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionManageWebhooks = &Permission{ + "manage_webhooks", + "authentication.permissions.manage_webhooks.name", + "authentication.permissions.manage_webhooks.description", + PermissionScopeTeam, + } + // DEPRECATED + PermissionManageOthersWebhooks = &Permission{ + "manage_others_webhooks", + "authentication.permissions.manage_others_webhooks.name", + "authentication.permissions.manage_others_webhooks.description", + PermissionScopeTeam, + } + // DEPRECATED - use PermissionManageOwnIncomingWebhooks instead + PermissionManageIncomingWebhooks = &Permission{ + "manage_incoming_webhooks", + "authentication.permissions.manage_incoming_webhooks.name", + "authentication.permissions.manage_incoming_webhooks.description", + PermissionScopeTeam, + } + PermissionManageOwnIncomingWebhooks = &Permission{ + "manage_own_incoming_webhooks", + "authentication.permissions.manage_own_incoming_webhooks.name", + "authentication.permissions.manage_own_incoming_webhooks.description", + PermissionScopeTeam, + } + // DEPRECATED - use PermissionManageOwnOutgoingWebhooks instead + PermissionManageOutgoingWebhooks = &Permission{ + "manage_outgoing_webhooks", + "authentication.permissions.manage_outgoing_webhooks.name", + "authentication.permissions.manage_outgoing_webhooks.description", + PermissionScopeTeam, + } + PermissionManageOwnOutgoingWebhooks = &Permission{ + "manage_own_outgoing_webhooks", + "authentication.permissions.manage_own_outgoing_webhooks.name", + "authentication.permissions.manage_own_outgoing_webhooks.description", + PermissionScopeTeam, + } + PermissionManageOthersIncomingWebhooks = &Permission{ + "manage_others_incoming_webhooks", + "authentication.permissions.manage_others_incoming_webhooks.name", + "authentication.permissions.manage_others_incoming_webhooks.description", + 
PermissionScopeTeam, + } + PermissionManageOthersOutgoingWebhooks = &Permission{ + "manage_others_outgoing_webhooks", + "authentication.permissions.manage_others_outgoing_webhooks.name", + "authentication.permissions.manage_others_outgoing_webhooks.description", + PermissionScopeTeam, + } + PermissionBypassIncomingWebhookChannelLock = &Permission{ + "bypass_incoming_webhook_channel_lock", + "authentication.permissions.bypass_incoming_webhook_channel_lock.name", + "authentication.permissions.bypass_incoming_webhook_channel_lock.description", + PermissionScopeTeam, + } + PermissionManageOAuth = &Permission{ + "manage_oauth", + "authentication.permissions.manage_oauth.name", + "authentication.permissions.manage_oauth.description", + PermissionScopeSystem, + } + PermissionManageSystemWideOAuth = &Permission{ + "manage_system_wide_oauth", + "authentication.permissions.manage_system_wide_oauth.name", + "authentication.permissions.manage_system_wide_oauth.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionManageEmojis = &Permission{ + "manage_emojis", + "authentication.permissions.manage_emojis.name", + "authentication.permissions.manage_emojis.description", + PermissionScopeTeam, + } + // DEPRECATED + PermissionManageOthersEmojis = &Permission{ + "manage_others_emojis", + "authentication.permissions.manage_others_emojis.name", + "authentication.permissions.manage_others_emojis.description", + PermissionScopeTeam, + } + PermissionCreateEmojis = &Permission{ + "create_emojis", + "authentication.permissions.create_emojis.name", + "authentication.permissions.create_emojis.description", + PermissionScopeTeam, + } + PermissionDeleteEmojis = &Permission{ + "delete_emojis", + "authentication.permissions.delete_emojis.name", + "authentication.permissions.delete_emojis.description", + PermissionScopeTeam, + } + PermissionDeleteOthersEmojis = &Permission{ + "delete_others_emojis", + "authentication.permissions.delete_others_emojis.name", + 
"authentication.permissions.delete_others_emojis.description", + PermissionScopeTeam, + } + PermissionCreatePost = &Permission{ + "create_post", + "authentication.permissions.create_post.name", + "authentication.permissions.create_post.description", + PermissionScopeChannel, + } + PermissionCreatePostPublic = &Permission{ + "create_post_public", + "authentication.permissions.create_post_public.name", + "authentication.permissions.create_post_public.description", + PermissionScopeChannel, + } + PermissionCreatePostEphemeral = &Permission{ + "create_post_ephemeral", + "authentication.permissions.create_post_ephemeral.name", + "authentication.permissions.create_post_ephemeral.description", + PermissionScopeChannel, + } + PermissionReadDeletedPosts = &Permission{ + "read_deleted_posts", + "authentication.permissions.read_deleted_posts.name", + "authentication.permissions.read_deleted_posts.description", + PermissionScopeChannel, + } + PermissionEditPost = &Permission{ + "edit_post", + "authentication.permissions.edit_post.name", + "authentication.permissions.edit_post.description", + PermissionScopeChannel, + } + PermissionEditOthersPosts = &Permission{ + "edit_others_posts", + "authentication.permissions.edit_others_posts.name", + "authentication.permissions.edit_others_posts.description", + PermissionScopeChannel, + } + PermissionDeletePost = &Permission{ + "delete_post", + "authentication.permissions.delete_post.name", + "authentication.permissions.delete_post.description", + PermissionScopeChannel, + } + PermissionDeleteOthersPosts = &Permission{ + "delete_others_posts", + "authentication.permissions.delete_others_posts.name", + "authentication.permissions.delete_others_posts.description", + PermissionScopeChannel, + } + PermissionManageSharedChannels = &Permission{ + "manage_shared_channels", + "authentication.permissions.manage_shared_channels.name", + "authentication.permissions.manage_shared_channels.description", + PermissionScopeSystem, + } + 
PermissionManageSecureConnections = &Permission{ + "manage_secure_connections", + "authentication.permissions.manage_secure_connections.name", + "authentication.permissions.manage_secure_connections.description", + PermissionScopeSystem, + } + + PermissionCreateDataRetentionJob = &Permission{ + "create_data_retention_job", + "", + "", + PermissionScopeSystem, + } + PermissionManageDataRetentionJob = &Permission{ + "manage_data_retention_job", + "", + "", + PermissionScopeSystem, + } + PermissionReadDataRetentionJob = &Permission{ + "read_data_retention_job", + "", + "", + PermissionScopeSystem, + } + + PermissionCreateComplianceExportJob = &Permission{ + "create_compliance_export_job", + "", + "", + PermissionScopeSystem, + } + PermissionManageComplianceExportJob = &Permission{ + "manage_compliance_export_job", + "", + "", + PermissionScopeSystem, + } + PermissionReadComplianceExportJob = &Permission{ + "read_compliance_export_job", + "", + "", + PermissionScopeSystem, + } + + PermissionReadAudits = &Permission{ + "read_audits", + "", + "", + PermissionScopeSystem, + } + + // DEPRECATED + PermissionPurgeBleveIndexes = &Permission{ + "purge_bleve_indexes", + "", + "", + PermissionScopeSystem, + } + + // DEPRECATED + PermissionCreatePostBleveIndexesJob = &Permission{ + "create_post_bleve_indexes_job", + "", + "", + PermissionScopeSystem, + } + + // DEPRECATED + PermissionManagePostBleveIndexesJob = &Permission{ + "manage_post_bleve_indexes_job", + "", + "", + PermissionScopeSystem, + } + + PermissionCreateLdapSyncJob = &Permission{ + "create_ldap_sync_job", + "", + "", + PermissionScopeSystem, + } + PermissionManageLdapSyncJob = &Permission{ + "manage_ldap_sync_job", + "", + "", + PermissionScopeSystem, + } + PermissionReadLdapSyncJob = &Permission{ + "read_ldap_sync_job", + "", + "", + PermissionScopeSystem, + } + + PermissionTestLdap = &Permission{ + "test_ldap", + "", + "", + PermissionScopeSystem, + } + + PermissionInvalidateEmailInvite = &Permission{ + 
"invalidate_email_invite", + "", + "", + PermissionScopeSystem, + } + PermissionGetSamlMetadataFromIdp = &Permission{ + "get_saml_metadata_from_idp", + "", + "", + PermissionScopeSystem, + } + PermissionAddSamlPublicCert = &Permission{ + "add_saml_public_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionAddSamlPrivateCert = &Permission{ + "add_saml_private_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionAddSamlIdpCert = &Permission{ + "add_saml_idp_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionRemoveSamlPublicCert = &Permission{ + "remove_saml_public_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionRemoveSamlPrivateCert = &Permission{ + "remove_saml_private_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionRemoveSamlIdpCert = &Permission{ + "remove_saml_idp_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionGetSamlCertStatus = &Permission{ + "get_saml_cert_status", + "", + "", + PermissionScopeSystem, + } + + PermissionAddLdapPublicCert = &Permission{ + "add_ldap_public_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionAddLdapPrivateCert = &Permission{ + "add_ldap_private_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionRemoveLdapPublicCert = &Permission{ + "remove_ldap_public_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionRemoveLdapPrivateCert = &Permission{ + "remove_ldap_private_cert", + "", + "", + PermissionScopeSystem, + } + + PermissionGetLogs = &Permission{ + "get_logs", + "", + "", + PermissionScopeSystem, + } + + PermissionReadLicenseInformation = &Permission{ + "read_license_information", + "", + "", + PermissionScopeSystem, + } + + PermissionGetAnalytics = &Permission{ + "get_analytics", + "", + "", + PermissionScopeSystem, + } + + PermissionManageLicenseInformation = &Permission{ + "manage_license_information", + "", + "", + PermissionScopeSystem, + } + + PermissionDownloadComplianceExportResult = &Permission{ + 
"download_compliance_export_result", + "authentication.permissions.download_compliance_export_result.name", + "authentication.permissions.download_compliance_export_result.description", + PermissionScopeSystem, + } + + PermissionTestSiteURL = &Permission{ + "test_site_url", + "", + "", + PermissionScopeSystem, + } + PermissionTestElasticsearch = &Permission{ + "test_elasticsearch", + "", + "", + PermissionScopeSystem, + } + PermissionTestS3 = &Permission{ + "test_s3", + "", + "", + PermissionScopeSystem, + } + PermissionReloadConfig = &Permission{ + "reload_config", + "", + "", + PermissionScopeSystem, + } + PermissionInvalidateCaches = &Permission{ + "invalidate_caches", + "", + "", + PermissionScopeSystem, + } + PermissionRecycleDatabaseConnections = &Permission{ + "recycle_database_connections", + "", + "", + PermissionScopeSystem, + } + PermissionPurgeElasticsearchIndexes = &Permission{ + "purge_elasticsearch_indexes", + "", + "", + PermissionScopeSystem, + } + PermissionTestEmail = &Permission{ + "test_email", + "", + "", + PermissionScopeSystem, + } + PermissionCreateElasticsearchPostIndexingJob = &Permission{ + "create_elasticsearch_post_indexing_job", + "", + "", + PermissionScopeSystem, + } + PermissionManageElasticsearchPostIndexingJob = &Permission{ + "manage_elasticsearch_post_indexing_job", + "", + "", + PermissionScopeSystem, + } + PermissionCreateElasticsearchPostAggregationJob = &Permission{ + "create_elasticsearch_post_aggregation_job", + "", + "", + PermissionScopeSystem, + } + PermissionManageElasticsearchPostAggregationJob = &Permission{ + "manage_elasticsearch_post_aggregation_job", + "", + "", + PermissionScopeSystem, + } + PermissionReadElasticsearchPostIndexingJob = &Permission{ + "read_elasticsearch_post_indexing_job", + "", + "", + PermissionScopeSystem, + } + PermissionReadElasticsearchPostAggregationJob = &Permission{ + "read_elasticsearch_post_aggregation_job", + "", + "", + PermissionScopeSystem, + } + + PermissionRemoveUserFromTeam = 
&Permission{ + "remove_user_from_team", + "authentication.permissions.remove_user_from_team.name", + "authentication.permissions.remove_user_from_team.description", + PermissionScopeTeam, + } + PermissionCreateTeam = &Permission{ + "create_team", + "authentication.permissions.create_team.name", + "authentication.permissions.create_team.description", + PermissionScopeSystem, + } + PermissionManageTeam = &Permission{ + "manage_team", + "authentication.permissions.manage_team.name", + "authentication.permissions.manage_team.description", + PermissionScopeTeam, + } + PermissionImportTeam = &Permission{ + "import_team", + "authentication.permissions.import_team.name", + "authentication.permissions.import_team.description", + PermissionScopeTeam, + } + PermissionViewTeam = &Permission{ + "view_team", + "authentication.permissions.view_team.name", + "authentication.permissions.view_team.description", + PermissionScopeTeam, + } + PermissionListUsersWithoutTeam = &Permission{ + "list_users_without_team", + "authentication.permissions.list_users_without_team.name", + "authentication.permissions.list_users_without_team.description", + PermissionScopeSystem, + } + PermissionCreateUserAccessToken = &Permission{ + "create_user_access_token", + "authentication.permissions.create_user_access_token.name", + "authentication.permissions.create_user_access_token.description", + PermissionScopeSystem, + } + PermissionReadUserAccessToken = &Permission{ + "read_user_access_token", + "authentication.permissions.read_user_access_token.name", + "authentication.permissions.read_user_access_token.description", + PermissionScopeSystem, + } + PermissionRevokeUserAccessToken = &Permission{ + "revoke_user_access_token", + "authentication.permissions.revoke_user_access_token.name", + "authentication.permissions.revoke_user_access_token.description", + PermissionScopeSystem, + } + PermissionCreateBot = &Permission{ + "create_bot", + "authentication.permissions.create_bot.name", + 
"authentication.permissions.create_bot.description", + PermissionScopeSystem, + } + PermissionAssignBot = &Permission{ + "assign_bot", + "authentication.permissions.assign_bot.name", + "authentication.permissions.assign_bot.description", + PermissionScopeSystem, + } + PermissionReadBots = &Permission{ + "read_bots", + "authentication.permissions.read_bots.name", + "authentication.permissions.read_bots.description", + PermissionScopeSystem, + } + PermissionReadOthersBots = &Permission{ + "read_others_bots", + "authentication.permissions.read_others_bots.name", + "authentication.permissions.read_others_bots.description", + PermissionScopeSystem, + } + PermissionManageBots = &Permission{ + "manage_bots", + "authentication.permissions.manage_bots.name", + "authentication.permissions.manage_bots.description", + PermissionScopeSystem, + } + PermissionManageOthersBots = &Permission{ + "manage_others_bots", + "authentication.permissions.manage_others_bots.name", + "authentication.permissions.manage_others_bots.description", + PermissionScopeSystem, + } + PermissionReadJobs = &Permission{ + "read_jobs", + "authentication.permisssions.read_jobs.name", + "authentication.permisssions.read_jobs.description", + PermissionScopeSystem, + } + PermissionManageJobs = &Permission{ + "manage_jobs", + "authentication.permisssions.manage_jobs.name", + "authentication.permisssions.manage_jobs.description", + PermissionScopeSystem, + } + PermissionViewMembers = &Permission{ + "view_members", + "authentication.permisssions.view_members.name", + "authentication.permisssions.view_members.description", + PermissionScopeTeam, + } + PermissionInviteGuest = &Permission{ + "invite_guest", + "authentication.permissions.invite_guest.name", + "authentication.permissions.invite_guest.description", + PermissionScopeTeam, + } + PermissionPromoteGuest = &Permission{ + "promote_guest", + "authentication.permissions.promote_guest.name", + "authentication.permissions.promote_guest.description", + 
PermissionScopeSystem, + } + PermissionDemoteToGuest = &Permission{ + "demote_to_guest", + "authentication.permissions.demote_to_guest.name", + "authentication.permissions.demote_to_guest.description", + PermissionScopeSystem, + } + PermissionUseChannelMentions = &Permission{ + "use_channel_mentions", + "authentication.permissions.use_channel_mentions.name", + "authentication.permissions.use_channel_mentions.description", + PermissionScopeChannel, + } + PermissionUseGroupMentions = &Permission{ + "use_group_mentions", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeChannel, + } + + // Channel bookmarks + PermissionAddBookmarkPublicChannel = &Permission{ + "add_bookmark_public_channel", + "", + "", + PermissionScopeChannel, + } + PermissionEditBookmarkPublicChannel = &Permission{ + "edit_bookmark_public_channel", + "", + "", + PermissionScopeChannel, + } + PermissionDeleteBookmarkPublicChannel = &Permission{ + "delete_bookmark_public_channel", + "", + "", + PermissionScopeChannel, + } + PermissionOrderBookmarkPublicChannel = &Permission{ + "order_bookmark_public_channel", + "", + "", + PermissionScopeChannel, + } + PermissionAddBookmarkPrivateChannel = &Permission{ + "add_bookmark_private_channel", + "", + "", + PermissionScopeChannel, + } + PermissionEditBookmarkPrivateChannel = &Permission{ + "edit_bookmark_private_channel", + "", + "", + PermissionScopeChannel, + } + PermissionDeleteBookmarkPrivateChannel = &Permission{ + "delete_bookmark_private_channel", + "", + "", + PermissionScopeChannel, + } + PermissionOrderBookmarkPrivateChannel = &Permission{ + "order_bookmark_private_channel", + "", + "", + PermissionScopeChannel, + } + + PermissionManagePublicChannelBanner = &Permission{ + "manage_public_channel_banner", + "", + "", + PermissionScopeChannel, + } + + PermissionManagePrivateChannelBanner = &Permission{ + "manage_private_channel_banner", + "", + "", + 
PermissionScopeChannel, + } + + PermissionManageChannelAccessRules = &Permission{ + "manage_channel_access_rules", + "", + "", + PermissionScopeChannel, + } + + PermissionReadOtherUsersTeams = &Permission{ + "read_other_users_teams", + "authentication.permissions.read_other_users_teams.name", + "authentication.permissions.read_other_users_teams.description", + PermissionScopeSystem, + } + PermissionEditBrand = &Permission{ + "edit_brand", + "authentication.permissions.edit_brand.name", + "authentication.permissions.edit_brand.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleReadAbout = &Permission{ + "sysconsole_read_about", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteAbout = &Permission{ + "sysconsole_write_about", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadAboutEditionAndLicense = &Permission{ + "sysconsole_read_about_edition_and_license", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAboutEditionAndLicense = &Permission{ + "sysconsole_write_about_edition_and_license", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadBilling = &Permission{ + "sysconsole_read_billing", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteBilling = &Permission{ + "sysconsole_write_billing", + "", + "", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleReadReporting = &Permission{ + "sysconsole_read_reporting", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteReporting = &Permission{ + "sysconsole_write_reporting", + 
"authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadReportingSiteStatistics = &Permission{ + "sysconsole_read_reporting_site_statistics", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteReportingSiteStatistics = &Permission{ + "sysconsole_write_reporting_site_statistics", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadReportingTeamStatistics = &Permission{ + "sysconsole_read_reporting_team_statistics", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteReportingTeamStatistics = &Permission{ + "sysconsole_write_reporting_team_statistics", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadReportingServerLogs = &Permission{ + "sysconsole_read_reporting_server_logs", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteReportingServerLogs = &Permission{ + "sysconsole_write_reporting_server_logs", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadUserManagementUsers = &Permission{ + "sysconsole_read_user_management_users", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleWriteUserManagementUsers = &Permission{ + "sysconsole_write_user_management_users", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadUserManagementGroups = &Permission{ + "sysconsole_read_user_management_groups", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleWriteUserManagementGroups = &Permission{ + "sysconsole_write_user_management_groups", + "authentication.permissions.use_group_mentions.name", + 
"authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadUserManagementTeams = &Permission{ + "sysconsole_read_user_management_teams", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleWriteUserManagementTeams = &Permission{ + "sysconsole_write_user_management_teams", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadUserManagementChannels = &Permission{ + "sysconsole_read_user_management_channels", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleWriteUserManagementChannels = &Permission{ + "sysconsole_write_user_management_channels", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadUserManagementPermissions = &Permission{ + "sysconsole_read_user_management_permissions", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleWriteUserManagementPermissions = &Permission{ + "sysconsole_write_user_management_permissions", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadUserManagementSystemRoles = &Permission{ + "sysconsole_read_user_management_system_roles", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleWriteUserManagementSystemRoles = &Permission{ + 
"sysconsole_write_user_management_system_roles", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleReadEnvironment = &Permission{ + "sysconsole_read_environment", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteEnvironment = &Permission{ + "sysconsole_write_environment", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentWebServer = &Permission{ + "sysconsole_read_environment_web_server", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentWebServer = &Permission{ + "sysconsole_write_environment_web_server", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentDatabase = &Permission{ + "sysconsole_read_environment_database", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentDatabase = &Permission{ + "sysconsole_write_environment_database", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentElasticsearch = &Permission{ + "sysconsole_read_environment_elasticsearch", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentElasticsearch = &Permission{ + "sysconsole_write_environment_elasticsearch", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentFileStorage = &Permission{ + "sysconsole_read_environment_file_storage", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentFileStorage = &Permission{ + "sysconsole_write_environment_file_storage", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentImageProxy = &Permission{ + 
"sysconsole_read_environment_image_proxy", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentImageProxy = &Permission{ + "sysconsole_write_environment_image_proxy", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentSMTP = &Permission{ + "sysconsole_read_environment_smtp", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentSMTP = &Permission{ + "sysconsole_write_environment_smtp", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentPushNotificationServer = &Permission{ + "sysconsole_read_environment_push_notification_server", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentPushNotificationServer = &Permission{ + "sysconsole_write_environment_push_notification_server", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentHighAvailability = &Permission{ + "sysconsole_read_environment_high_availability", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentHighAvailability = &Permission{ + "sysconsole_write_environment_high_availability", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentRateLimiting = &Permission{ + "sysconsole_read_environment_rate_limiting", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentRateLimiting = &Permission{ + "sysconsole_write_environment_rate_limiting", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentLogging = &Permission{ + "sysconsole_read_environment_logging", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentLogging = &Permission{ + "sysconsole_write_environment_logging", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentSessionLengths = &Permission{ + "sysconsole_read_environment_session_lengths", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentSessionLengths = 
&Permission{ + "sysconsole_write_environment_session_lengths", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentPerformanceMonitoring = &Permission{ + "sysconsole_read_environment_performance_monitoring", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentPerformanceMonitoring = &Permission{ + "sysconsole_write_environment_performance_monitoring", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentDeveloper = &Permission{ + "sysconsole_read_environment_developer", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentDeveloper = &Permission{ + "sysconsole_write_environment_developer", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadEnvironmentMobileSecurity = &Permission{ + "sysconsole_read_environment_mobile_security", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteEnvironmentMobileSecurity = &Permission{ + "sysconsole_write_environment_mobile_security", + "", + "", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleReadSite = &Permission{ + "sysconsole_read_site", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteSite = &Permission{ + "sysconsole_write_site", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + + PermissionSysconsoleReadSiteCustomization = &Permission{ + "sysconsole_read_site_customization", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteCustomization = &Permission{ + "sysconsole_write_site_customization", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSiteLocalization = &Permission{ + "sysconsole_read_site_localization", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteLocalization = 
&Permission{ + "sysconsole_write_site_localization", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSiteUsersAndTeams = &Permission{ + "sysconsole_read_site_users_and_teams", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteUsersAndTeams = &Permission{ + "sysconsole_write_site_users_and_teams", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSiteNotifications = &Permission{ + "sysconsole_read_site_notifications", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteNotifications = &Permission{ + "sysconsole_write_site_notifications", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSiteAnnouncementBanner = &Permission{ + "sysconsole_read_site_announcement_banner", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteAnnouncementBanner = &Permission{ + "sysconsole_write_site_announcement_banner", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSiteEmoji = &Permission{ + "sysconsole_read_site_emoji", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteEmoji = &Permission{ + "sysconsole_write_site_emoji", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSitePosts = &Permission{ + "sysconsole_read_site_posts", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSitePosts = &Permission{ + "sysconsole_write_site_posts", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSiteFileSharingAndDownloads = &Permission{ + "sysconsole_read_site_file_sharing_and_downloads", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteFileSharingAndDownloads = &Permission{ + "sysconsole_write_site_file_sharing_and_downloads", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSitePublicLinks = &Permission{ + "sysconsole_read_site_public_links", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSitePublicLinks = &Permission{ + 
"sysconsole_write_site_public_links", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadSiteNotices = &Permission{ + "sysconsole_read_site_notices", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteSiteNotices = &Permission{ + "sysconsole_write_site_notices", + "", + "", + PermissionScopeSystem, + } + + PermissionSysconsoleReadIPFilters = &Permission{ + "sysconsole_read_site_ip_filters", + "", + "", + PermissionScopeSystem, + } + + PermissionSysconsoleWriteIPFilters = &Permission{ + "sysconsole_write_site_ip_filters", + "", + "", + PermissionScopeSystem, + } + + // Deprecated + PermissionSysconsoleReadAuthentication = &Permission{ + "sysconsole_read_authentication", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // Deprecated + PermissionSysconsoleWriteAuthentication = &Permission{ + "sysconsole_write_authentication", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadAuthenticationSignup = &Permission{ + "sysconsole_read_authentication_signup", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationSignup = &Permission{ + "sysconsole_write_authentication_signup", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadAuthenticationEmail = &Permission{ + "sysconsole_read_authentication_email", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationEmail = &Permission{ + "sysconsole_write_authentication_email", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadAuthenticationPassword = &Permission{ + "sysconsole_read_authentication_password", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationPassword = &Permission{ + "sysconsole_write_authentication_password", + "", + "", + PermissionScopeSystem, 
+ } + PermissionSysconsoleReadAuthenticationMfa = &Permission{ + "sysconsole_read_authentication_mfa", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationMfa = &Permission{ + "sysconsole_write_authentication_mfa", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadAuthenticationLdap = &Permission{ + "sysconsole_read_authentication_ldap", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationLdap = &Permission{ + "sysconsole_write_authentication_ldap", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadAuthenticationSaml = &Permission{ + "sysconsole_read_authentication_saml", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationSaml = &Permission{ + "sysconsole_write_authentication_saml", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadAuthenticationOpenid = &Permission{ + "sysconsole_read_authentication_openid", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationOpenid = &Permission{ + "sysconsole_write_authentication_openid", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadAuthenticationGuestAccess = &Permission{ + "sysconsole_read_authentication_guest_access", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteAuthenticationGuestAccess = &Permission{ + "sysconsole_write_authentication_guest_access", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadPlugins = &Permission{ + "sysconsole_read_plugins", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleWritePlugins = &Permission{ + "sysconsole_write_plugins", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleReadIntegrations = &Permission{ + 
"sysconsole_read_integrations", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteIntegrations = &Permission{ + "sysconsole_write_integrations", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadIntegrationsIntegrationManagement = &Permission{ + "sysconsole_read_integrations_integration_management", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteIntegrationsIntegrationManagement = &Permission{ + "sysconsole_write_integrations_integration_management", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadIntegrationsBotAccounts = &Permission{ + "sysconsole_read_integrations_bot_accounts", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteIntegrationsBotAccounts = &Permission{ + "sysconsole_write_integrations_bot_accounts", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadIntegrationsGif = &Permission{ + "sysconsole_read_integrations_gif", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteIntegrationsGif = &Permission{ + "sysconsole_write_integrations_gif", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadIntegrationsCors = &Permission{ + "sysconsole_read_integrations_cors", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteIntegrationsCors = &Permission{ + "sysconsole_write_integrations_cors", + "", + "", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleReadCompliance = &Permission{ + "sysconsole_read_compliance", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteCompliance = &Permission{ + "sysconsole_write_compliance", + 
"authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + PermissionSysconsoleReadComplianceDataRetentionPolicy = &Permission{ + "sysconsole_read_compliance_data_retention_policy", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteComplianceDataRetentionPolicy = &Permission{ + "sysconsole_write_compliance_data_retention_policy", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadComplianceComplianceExport = &Permission{ + "sysconsole_read_compliance_compliance_export", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteComplianceComplianceExport = &Permission{ + "sysconsole_write_compliance_compliance_export", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadComplianceComplianceMonitoring = &Permission{ + "sysconsole_read_compliance_compliance_monitoring", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteComplianceComplianceMonitoring = &Permission{ + "sysconsole_write_compliance_compliance_monitoring", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadComplianceCustomTermsOfService = &Permission{ + "sysconsole_read_compliance_custom_terms_of_service", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteComplianceCustomTermsOfService = &Permission{ + "sysconsole_write_compliance_custom_terms_of_service", + "", + "", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleReadExperimental = &Permission{ + "sysconsole_read_experimental", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteExperimental = &Permission{ + "sysconsole_write_experimental", + "authentication.permissions.use_group_mentions.name", + "authentication.permissions.use_group_mentions.description", + PermissionScopeSystem, + } + 
PermissionSysconsoleReadExperimentalFeatures = &Permission{ + "sysconsole_read_experimental_features", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteExperimentalFeatures = &Permission{ + "sysconsole_write_experimental_features", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleReadExperimentalFeatureFlags = &Permission{ + "sysconsole_read_experimental_feature_flags", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteExperimentalFeatureFlags = &Permission{ + "sysconsole_write_experimental_feature_flags", + "", + "", + PermissionScopeSystem, + } + + // DEPRECATED + PermissionSysconsoleReadExperimentalBleve = &Permission{ + "sysconsole_read_experimental_bleve", + "", + "", + PermissionScopeSystem, + } + // DEPRECATED + PermissionSysconsoleWriteExperimentalBleve = &Permission{ + "sysconsole_write_experimental_bleve", + "", + "", + PermissionScopeSystem, + } + + PermissionCreateCustomGroup = &Permission{ + "create_custom_group", + "authentication.permissions.create_custom_group.name", + "authentication.permissions.create_custom_group.description", + PermissionScopeSystem, + } + + PermissionManageCustomGroupMembers = &Permission{ + "manage_custom_group_members", + "authentication.permissions.manage_custom_group_members.name", + "authentication.permissions.manage_custom_group_members.description", + PermissionScopeGroup, + } + + PermissionEditCustomGroup = &Permission{ + "edit_custom_group", + "authentication.permissions.edit_custom_group.name", + "authentication.permissions.edit_custom_group.description", + PermissionScopeGroup, + } + + PermissionDeleteCustomGroup = &Permission{ + "delete_custom_group", + "authentication.permissions.delete_custom_group.name", + "authentication.permissions.delete_custom_group.description", + PermissionScopeGroup, + } + + PermissionRestoreCustomGroup = &Permission{ + "restore_custom_group", + "authentication.permissions.restore_custom_group.name", + 
"authentication.permissions.restore_custom_group.description", + PermissionScopeGroup, + } + + // Playbooks + PermissionPublicPlaybookCreate = &Permission{ + "playbook_public_create", + "", + "", + PermissionScopeTeam, + } + + PermissionPublicPlaybookManageProperties = &Permission{ + "playbook_public_manage_properties", + "", + "", + PermissionScopePlaybook, + } + + PermissionPublicPlaybookManageMembers = &Permission{ + "playbook_public_manage_members", + "", + "", + PermissionScopePlaybook, + } + + PermissionPublicPlaybookManageRoles = &Permission{ + "playbook_public_manage_roles", + "", + "", + PermissionScopePlaybook, + } + + PermissionPublicPlaybookView = &Permission{ + "playbook_public_view", + "", + "", + PermissionScopePlaybook, + } + + PermissionPublicPlaybookMakePrivate = &Permission{ + "playbook_public_make_private", + "", + "", + PermissionScopePlaybook, + } + + PermissionPrivatePlaybookCreate = &Permission{ + "playbook_private_create", + "", + "", + PermissionScopeTeam, + } + + PermissionPrivatePlaybookManageProperties = &Permission{ + "playbook_private_manage_properties", + "", + "", + PermissionScopePlaybook, + } + + PermissionPrivatePlaybookManageMembers = &Permission{ + "playbook_private_manage_members", + "", + "", + PermissionScopePlaybook, + } + + PermissionPrivatePlaybookManageRoles = &Permission{ + "playbook_private_manage_roles", + "", + "", + PermissionScopePlaybook, + } + + PermissionPrivatePlaybookView = &Permission{ + "playbook_private_view", + "", + "", + PermissionScopePlaybook, + } + + PermissionPrivatePlaybookMakePublic = &Permission{ + "playbook_private_make_public", + "", + "", + PermissionScopePlaybook, + } + + PermissionRunCreate = &Permission{ + "run_create", + "", + "", + PermissionScopePlaybook, + } + + PermissionRunManageProperties = &Permission{ + "run_manage_properties", + "", + "", + PermissionScopeRun, + } + + PermissionRunManageMembers = &Permission{ + "run_manage_members", + "", + "", + PermissionScopeRun, + } + + 
PermissionRunView = &Permission{ + "run_view", + "", + "", + PermissionScopeRun, + } + + PermissionSysconsoleReadProductsBoards = &Permission{ + "sysconsole_read_products_boards", + "", + "", + PermissionScopeSystem, + } + PermissionSysconsoleWriteProductsBoards = &Permission{ + "sysconsole_write_products_boards", + "", + "", + PermissionScopeSystem, + } + + PermissionManageOutgoingOAuthConnections = &Permission{ + "manage_outgoing_oauth_connections", + "authentication.permissions.manage_outgoing_oauth_connections.name", + "authentication.permissions.manage_outgoing_oauth_connections.description", + PermissionScopeSystem, + } + + SysconsoleReadPermissions = []*Permission{ + PermissionSysconsoleReadAboutEditionAndLicense, + PermissionSysconsoleReadBilling, + PermissionSysconsoleReadReportingSiteStatistics, + PermissionSysconsoleReadReportingTeamStatistics, + PermissionSysconsoleReadReportingServerLogs, + PermissionSysconsoleReadUserManagementUsers, + PermissionSysconsoleReadUserManagementGroups, + PermissionSysconsoleReadUserManagementTeams, + PermissionSysconsoleReadUserManagementChannels, + PermissionSysconsoleReadUserManagementPermissions, + PermissionSysconsoleReadUserManagementSystemRoles, + PermissionSysconsoleReadEnvironmentWebServer, + PermissionSysconsoleReadEnvironmentDatabase, + PermissionSysconsoleReadEnvironmentElasticsearch, + PermissionSysconsoleReadEnvironmentFileStorage, + PermissionSysconsoleReadEnvironmentImageProxy, + PermissionSysconsoleReadEnvironmentSMTP, + PermissionSysconsoleReadEnvironmentPushNotificationServer, + PermissionSysconsoleReadEnvironmentHighAvailability, + PermissionSysconsoleReadEnvironmentRateLimiting, + PermissionSysconsoleReadEnvironmentLogging, + PermissionSysconsoleReadEnvironmentSessionLengths, + PermissionSysconsoleReadEnvironmentPerformanceMonitoring, + PermissionSysconsoleReadEnvironmentDeveloper, + PermissionSysconsoleReadEnvironmentMobileSecurity, + PermissionSysconsoleReadSiteCustomization, + 
PermissionSysconsoleReadSiteLocalization, + PermissionSysconsoleReadSiteUsersAndTeams, + PermissionSysconsoleReadSiteNotifications, + PermissionSysconsoleReadSiteAnnouncementBanner, + PermissionSysconsoleReadSiteEmoji, + PermissionSysconsoleReadSitePosts, + PermissionSysconsoleReadSiteFileSharingAndDownloads, + PermissionSysconsoleReadSitePublicLinks, + PermissionSysconsoleReadSiteNotices, + PermissionSysconsoleReadAuthenticationSignup, + PermissionSysconsoleReadAuthenticationEmail, + PermissionSysconsoleReadAuthenticationPassword, + PermissionSysconsoleReadAuthenticationMfa, + PermissionSysconsoleReadAuthenticationLdap, + PermissionSysconsoleReadAuthenticationSaml, + PermissionSysconsoleReadAuthenticationOpenid, + PermissionSysconsoleReadAuthenticationGuestAccess, + PermissionSysconsoleReadPlugins, + PermissionSysconsoleReadIntegrationsIntegrationManagement, + PermissionSysconsoleReadIntegrationsBotAccounts, + PermissionSysconsoleReadIntegrationsGif, + PermissionSysconsoleReadIntegrationsCors, + PermissionSysconsoleReadComplianceDataRetentionPolicy, + PermissionSysconsoleReadComplianceComplianceExport, + PermissionSysconsoleReadComplianceComplianceMonitoring, + PermissionSysconsoleReadComplianceCustomTermsOfService, + PermissionSysconsoleReadExperimentalFeatures, + PermissionSysconsoleReadExperimentalFeatureFlags, + PermissionSysconsoleReadProductsBoards, + PermissionSysconsoleReadIPFilters, + } + + SysconsoleWritePermissions = []*Permission{ + PermissionSysconsoleWriteAboutEditionAndLicense, + PermissionSysconsoleWriteBilling, + PermissionSysconsoleWriteReportingSiteStatistics, + PermissionSysconsoleWriteReportingTeamStatistics, + PermissionSysconsoleWriteReportingServerLogs, + PermissionSysconsoleWriteUserManagementUsers, + PermissionSysconsoleWriteUserManagementGroups, + PermissionSysconsoleWriteUserManagementTeams, + PermissionSysconsoleWriteUserManagementChannels, + PermissionSysconsoleWriteUserManagementPermissions, + 
PermissionSysconsoleWriteUserManagementSystemRoles, + PermissionSysconsoleWriteEnvironmentWebServer, + PermissionSysconsoleWriteEnvironmentDatabase, + PermissionSysconsoleWriteEnvironmentElasticsearch, + PermissionSysconsoleWriteEnvironmentFileStorage, + PermissionSysconsoleWriteEnvironmentImageProxy, + PermissionSysconsoleWriteEnvironmentSMTP, + PermissionSysconsoleWriteEnvironmentPushNotificationServer, + PermissionSysconsoleWriteEnvironmentHighAvailability, + PermissionSysconsoleWriteEnvironmentRateLimiting, + PermissionSysconsoleWriteEnvironmentLogging, + PermissionSysconsoleWriteEnvironmentSessionLengths, + PermissionSysconsoleWriteEnvironmentPerformanceMonitoring, + PermissionSysconsoleWriteEnvironmentDeveloper, + PermissionSysconsoleWriteEnvironmentMobileSecurity, + PermissionSysconsoleWriteSiteCustomization, + PermissionSysconsoleWriteSiteLocalization, + PermissionSysconsoleWriteSiteUsersAndTeams, + PermissionSysconsoleWriteSiteNotifications, + PermissionSysconsoleWriteSiteAnnouncementBanner, + PermissionSysconsoleWriteSiteEmoji, + PermissionSysconsoleWriteSitePosts, + PermissionSysconsoleWriteSiteFileSharingAndDownloads, + PermissionSysconsoleWriteSitePublicLinks, + PermissionSysconsoleWriteSiteNotices, + PermissionSysconsoleWriteAuthenticationSignup, + PermissionSysconsoleWriteAuthenticationEmail, + PermissionSysconsoleWriteAuthenticationPassword, + PermissionSysconsoleWriteAuthenticationMfa, + PermissionSysconsoleWriteAuthenticationLdap, + PermissionSysconsoleWriteAuthenticationSaml, + PermissionSysconsoleWriteAuthenticationOpenid, + PermissionSysconsoleWriteAuthenticationGuestAccess, + PermissionSysconsoleWritePlugins, + PermissionSysconsoleWriteIntegrationsIntegrationManagement, + PermissionSysconsoleWriteIntegrationsBotAccounts, + PermissionSysconsoleWriteIntegrationsGif, + PermissionSysconsoleWriteIntegrationsCors, + PermissionSysconsoleWriteComplianceDataRetentionPolicy, + PermissionSysconsoleWriteComplianceComplianceExport, + 
PermissionSysconsoleWriteComplianceComplianceMonitoring, + PermissionSysconsoleWriteComplianceCustomTermsOfService, + PermissionSysconsoleWriteExperimentalFeatures, + PermissionSysconsoleWriteExperimentalFeatureFlags, + PermissionSysconsoleWriteProductsBoards, + PermissionSysconsoleWriteIPFilters, + } + + SystemScopedPermissionsMinusSysconsole := []*Permission{ + PermissionAssignSystemAdminRole, + PermissionManageRoles, + PermissionManageSystem, + PermissionCreateDirectChannel, + PermissionCreateGroupChannel, + PermissionListPublicTeams, + PermissionJoinPublicTeams, + PermissionListPrivateTeams, + PermissionJoinPrivateTeams, + PermissionEditOtherUsers, + PermissionReadOtherUsersTeams, + PermissionGetPublicLink, + PermissionManageSystemWideOAuth, + PermissionCreateTeam, + PermissionListUsersWithoutTeam, + PermissionCreateUserAccessToken, + PermissionReadUserAccessToken, + PermissionRevokeUserAccessToken, + PermissionCreateBot, + PermissionAssignBot, + PermissionReadBots, + PermissionReadOthersBots, + PermissionManageBots, + PermissionManageOthersBots, + PermissionReadJobs, + PermissionManageJobs, + PermissionPromoteGuest, + PermissionDemoteToGuest, + PermissionEditBrand, + PermissionManageSharedChannels, + PermissionManageSecureConnections, + PermissionDownloadComplianceExportResult, + PermissionCreateDataRetentionJob, + PermissionManageDataRetentionJob, + PermissionReadDataRetentionJob, + PermissionCreateComplianceExportJob, + PermissionManageComplianceExportJob, + PermissionReadComplianceExportJob, + PermissionReadAudits, + PermissionTestSiteURL, + PermissionTestElasticsearch, + PermissionTestS3, + PermissionReloadConfig, + PermissionInvalidateCaches, + PermissionRecycleDatabaseConnections, + PermissionPurgeElasticsearchIndexes, + PermissionTestEmail, + PermissionCreateElasticsearchPostIndexingJob, + PermissionManageElasticsearchPostIndexingJob, + PermissionCreateElasticsearchPostAggregationJob, + PermissionManageElasticsearchPostAggregationJob, + 
PermissionReadElasticsearchPostIndexingJob, + PermissionReadElasticsearchPostAggregationJob, + PermissionCreateLdapSyncJob, + PermissionManageLdapSyncJob, + PermissionReadLdapSyncJob, + PermissionTestLdap, + PermissionInvalidateEmailInvite, + PermissionGetSamlMetadataFromIdp, + PermissionAddSamlPublicCert, + PermissionAddSamlPrivateCert, + PermissionAddSamlIdpCert, + PermissionRemoveSamlPublicCert, + PermissionRemoveSamlPrivateCert, + PermissionRemoveSamlIdpCert, + PermissionGetSamlCertStatus, + PermissionAddLdapPublicCert, + PermissionAddLdapPrivateCert, + PermissionRemoveLdapPublicCert, + PermissionRemoveLdapPrivateCert, + PermissionGetAnalytics, + PermissionGetLogs, + PermissionReadLicenseInformation, + PermissionManageLicenseInformation, + PermissionCreateCustomGroup, + PermissionManageOutgoingOAuthConnections, + } + + TeamScopedPermissions := []*Permission{ + PermissionInviteUser, + PermissionAddUserToTeam, + PermissionManageOwnSlashCommands, + PermissionManageOthersSlashCommands, + PermissionCreatePublicChannel, + PermissionCreatePrivateChannel, + PermissionManageTeamRoles, + PermissionListTeamChannels, + PermissionJoinPublicChannels, + PermissionReadPublicChannel, + PermissionManageOwnIncomingWebhooks, + PermissionManageOwnOutgoingWebhooks, + PermissionManageOthersIncomingWebhooks, + PermissionManageOthersOutgoingWebhooks, + PermissionBypassIncomingWebhookChannelLock, + PermissionCreateEmojis, + PermissionDeleteEmojis, + PermissionDeleteOthersEmojis, + PermissionRemoveUserFromTeam, + PermissionManageTeam, + PermissionImportTeam, + PermissionViewTeam, + PermissionViewMembers, + PermissionInviteGuest, + PermissionPublicPlaybookCreate, + PermissionPrivatePlaybookCreate, + } + + ChannelScopedPermissions := []*Permission{ + PermissionUseSlashCommands, + PermissionManagePublicChannelMembers, + PermissionManagePrivateChannelMembers, + PermissionManageChannelRoles, + PermissionManagePublicChannelProperties, + PermissionManagePrivateChannelProperties, + 
PermissionConvertPublicChannelToPrivate, + PermissionConvertPrivateChannelToPublic, + PermissionDeletePublicChannel, + PermissionDeletePrivateChannel, + PermissionReadChannel, + PermissionReadChannelContent, + PermissionReadPublicChannelGroups, + PermissionReadPrivateChannelGroups, + PermissionAddReaction, + PermissionRemoveReaction, + PermissionRemoveOthersReactions, + PermissionUploadFile, + PermissionCreatePost, + PermissionCreatePostPublic, + PermissionCreatePostEphemeral, + PermissionReadDeletedPosts, + PermissionEditPost, + PermissionEditOthersPosts, + PermissionDeletePost, + PermissionDeleteOthersPosts, + PermissionUseChannelMentions, + PermissionUseGroupMentions, + PermissionAddBookmarkPublicChannel, + PermissionEditBookmarkPublicChannel, + PermissionDeleteBookmarkPublicChannel, + PermissionOrderBookmarkPublicChannel, + PermissionAddBookmarkPrivateChannel, + PermissionEditBookmarkPrivateChannel, + PermissionDeleteBookmarkPrivateChannel, + PermissionOrderBookmarkPrivateChannel, + PermissionManagePublicChannelBanner, + PermissionManagePrivateChannelBanner, + PermissionManageChannelAccessRules, + } + + GroupScopedPermissions := []*Permission{ + PermissionManageCustomGroupMembers, + PermissionEditCustomGroup, + PermissionDeleteCustomGroup, + PermissionRestoreCustomGroup, + } + + DeprecatedPermissions = []*Permission{ + PermissionPermanentDeleteUser, + PermissionManageWebhooks, + PermissionManageOthersWebhooks, + PermissionManageIncomingWebhooks, + PermissionManageOutgoingWebhooks, + PermissionManageSlashCommands, + PermissionManageOAuth, + PermissionManageEmojis, + PermissionManageOthersEmojis, + PermissionSysconsoleReadAuthentication, + PermissionSysconsoleWriteAuthentication, + PermissionSysconsoleReadSite, + PermissionSysconsoleWriteSite, + PermissionSysconsoleReadEnvironment, + PermissionSysconsoleWriteEnvironment, + PermissionSysconsoleReadReporting, + PermissionSysconsoleWriteReporting, + PermissionSysconsoleReadAbout, + PermissionSysconsoleWriteAbout, + 
PermissionSysconsoleReadExperimental, + PermissionSysconsoleWriteExperimental, + PermissionSysconsoleReadIntegrations, + PermissionSysconsoleWriteIntegrations, + PermissionSysconsoleReadCompliance, + PermissionSysconsoleWriteCompliance, + PermissionPurgeBleveIndexes, + PermissionCreatePostBleveIndexesJob, + PermissionManagePostBleveIndexesJob, + PermissionSysconsoleReadExperimentalBleve, + PermissionSysconsoleWriteExperimentalBleve, + } + + PlaybookScopedPermissions := []*Permission{ + PermissionPublicPlaybookManageProperties, + PermissionPublicPlaybookManageMembers, + PermissionPublicPlaybookManageRoles, + PermissionPublicPlaybookView, + PermissionPublicPlaybookMakePrivate, + PermissionPrivatePlaybookManageProperties, + PermissionPrivatePlaybookManageMembers, + PermissionPrivatePlaybookManageRoles, + PermissionPrivatePlaybookView, + PermissionPrivatePlaybookMakePublic, + PermissionRunCreate, + } + + RunScopedPermissions := []*Permission{ + PermissionRunManageProperties, + PermissionRunManageMembers, + PermissionRunView, + } + + AllPermissions = []*Permission{} + AllPermissions = append(AllPermissions, SystemScopedPermissionsMinusSysconsole...) + AllPermissions = append(AllPermissions, TeamScopedPermissions...) + AllPermissions = append(AllPermissions, ChannelScopedPermissions...) + AllPermissions = append(AllPermissions, SysconsoleReadPermissions...) + AllPermissions = append(AllPermissions, SysconsoleWritePermissions...) + AllPermissions = append(AllPermissions, GroupScopedPermissions...) + AllPermissions = append(AllPermissions, PlaybookScopedPermissions...) + AllPermissions = append(AllPermissions, RunScopedPermissions...) 
+ + ChannelModeratedPermissions = []string{ + PermissionCreatePost.Id, + "create_reactions", + "manage_members", + PermissionUseChannelMentions.Id, + "manage_bookmarks", + } + + ChannelModeratedPermissionsMap = map[string]string{ + PermissionCreatePost.Id: ChannelModeratedPermissions[0], + PermissionAddReaction.Id: ChannelModeratedPermissions[1], + PermissionRemoveReaction.Id: ChannelModeratedPermissions[1], + PermissionManagePublicChannelMembers.Id: ChannelModeratedPermissions[2], + PermissionManagePrivateChannelMembers.Id: ChannelModeratedPermissions[2], + PermissionUseChannelMentions.Id: ChannelModeratedPermissions[3], + } + + ModeratedBookmarkPermissions = []*Permission{ + PermissionAddBookmarkPublicChannel, + PermissionEditBookmarkPublicChannel, + PermissionDeleteBookmarkPublicChannel, + PermissionOrderBookmarkPublicChannel, + PermissionAddBookmarkPrivateChannel, + PermissionEditBookmarkPrivateChannel, + PermissionDeleteBookmarkPrivateChannel, + PermissionOrderBookmarkPrivateChannel, + } + + for _, mbp := range ModeratedBookmarkPermissions { + ChannelModeratedPermissionsMap[mbp.Id] = ChannelModeratedPermissions[4] + } +} + +func init() { + initializePermissions() +} + +func MakePermissionError(s *Session, permissions []*Permission) *AppError { + return MakePermissionErrorForUser(s.UserId, permissions) +} + +func MakePermissionErrorForUser(userId string, permissions []*Permission) *AppError { + var permissionsStr strings.Builder + permissionsStr.WriteString("permission=") + for i, permission := range permissions { + permissionsStr.WriteString(permission.Id) + if i != len(permissions)-1 { + permissionsStr.WriteString(",") + } + } + return NewAppError("Permissions", "api.context.permissions.app_error", nil, "userId="+userId+", "+permissionsStr.String(), http.StatusForbidden) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_cluster_event.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_cluster_event.go new 
file mode 100644 index 00000000..9e227447 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_cluster_event.go @@ -0,0 +1,28 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +const ( + PluginClusterEventSendTypeReliable = ClusterSendReliable + PluginClusterEventSendTypeBestEffort = ClusterSendBestEffort +) + +// PluginClusterEvent is used to allow intra-cluster plugin communication. +type PluginClusterEvent struct { + // Id is the unique identifier for the event. + Id string + // Data is the event payload. + Data []byte +} + +// PluginClusterEventSendOptions defines some properties that apply when sending +// plugin events across a cluster. +type PluginClusterEventSendOptions struct { + // SendType defines the type of communication channel used to send the event. + SendType string + // TargetId identifies the cluster node to which the event should be sent. + // It should match the cluster id of the receiving instance. + // If empty, the event gets broadcasted to all other nodes. + TargetId string +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_constants.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_constants.go new file mode 100644 index 00000000..85e4612a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_constants.go @@ -0,0 +1,14 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +const ( + PluginIdPlaybooks = "playbooks" + PluginIdFocalboard = "focalboard" + PluginIdApps = "com.mattermost.apps" + PluginIdCalls = "com.mattermost.calls" + PluginIdNPS = "com.mattermost.nps" + PluginIdChannelExport = "com.mattermost.plugin-channel-export" + PluginIdAI = "mattermost-ai" +) diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_event_data.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_event_data.go new file mode 100644 index 00000000..1253533d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_event_data.go @@ -0,0 +1,9 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// PluginEventData used to notify peers about plugin changes. +type PluginEventData struct { + Id string `json:"id"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_key_value.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_key_value.go new file mode 100644 index 00000000..f9433f7f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_key_value.go @@ -0,0 +1,33 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" + "unicode/utf8" +) + +const ( + KeyValuePluginIdMaxRunes = 190 + KeyValueKeyMaxRunes = 150 +) + +type PluginKeyValue struct { + PluginId string `json:"plugin_id"` + Key string `json:"key" db:"PKey"` + Value []byte `json:"value" db:"PValue"` + ExpireAt int64 `json:"expire_at"` +} + +func (kv *PluginKeyValue) IsValid() *AppError { + if kv.PluginId == "" || utf8.RuneCountInString(kv.PluginId) > KeyValuePluginIdMaxRunes { + return NewAppError("PluginKeyValue.IsValid", "model.plugin_key_value.is_valid.plugin_id.app_error", map[string]any{"Max": KeyValueKeyMaxRunes, "Min": 0}, "key="+kv.Key, http.StatusBadRequest) + } + + if kv.Key == "" || utf8.RuneCountInString(kv.Key) > KeyValueKeyMaxRunes { + return NewAppError("PluginKeyValue.IsValid", "model.plugin_key_value.is_valid.key.app_error", map[string]any{"Max": KeyValueKeyMaxRunes, "Min": 0}, "key="+kv.Key, http.StatusBadRequest) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_kvset_options.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_kvset_options.go new file mode 100644 index 00000000..1d374c80 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_kvset_options.go @@ -0,0 +1,47 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" +) + +// PluginKVSetOptions contains information on how to store a value in the plugin KV store. +type PluginKVSetOptions struct { + Atomic bool // Only store the value if the current value matches the oldValue + OldValue []byte // The value to compare with the current value. Only used when Atomic is true + ExpireInSeconds int64 // Set an expire counter +} + +// IsValid returns nil if the chosen options are valid. 
+func (opt *PluginKVSetOptions) IsValid() *AppError { + if !opt.Atomic && opt.OldValue != nil { + return NewAppError( + "PluginKVSetOptions.IsValid", + "model.plugin_kvset_options.is_valid.old_value.app_error", + nil, + "", + http.StatusBadRequest, + ) + } + + return nil +} + +// NewPluginKeyValueFromOptions return a PluginKeyValue given a pluginID, a KV pair and options. +func NewPluginKeyValueFromOptions(pluginId, key string, value []byte, opt PluginKVSetOptions) (*PluginKeyValue, *AppError) { + expireAt := int64(0) + if opt.ExpireInSeconds != 0 { + expireAt = GetMillis() + (opt.ExpireInSeconds * 1000) + } + + kv := &PluginKeyValue{ + PluginId: pluginId, + Key: key, + Value: value, + ExpireAt: expireAt, + } + + return kv, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_on_install_event.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_on_install_event.go new file mode 100644 index 00000000..186fd5bd --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_on_install_event.go @@ -0,0 +1,9 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// OnInstallEvent is sent to the plugin when it gets installed. +type OnInstallEvent struct { + UserId string // The user who installed the plugin +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_reattach.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_reattach.go new file mode 100644 index 00000000..77f5552b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_reattach.go @@ -0,0 +1,59 @@ +package model + +import ( + "net" + "net/http" + + "github.com/hashicorp/go-plugin" +) + +// PluginReattachConfig is a serializable version of go-plugin's ReattachConfig. 
+type PluginReattachConfig struct { + Protocol string + ProtocolVersion int + Addr net.UnixAddr + Pid int + Test bool +} + +func NewPluginReattachConfig(pluginReattachmentConfig *plugin.ReattachConfig) *PluginReattachConfig { + return &PluginReattachConfig{ + Protocol: string(pluginReattachmentConfig.Protocol), + ProtocolVersion: pluginReattachmentConfig.ProtocolVersion, + Addr: net.UnixAddr{ + Name: pluginReattachmentConfig.Addr.String(), + Net: pluginReattachmentConfig.Addr.Network(), + }, + Pid: pluginReattachmentConfig.Pid, + Test: pluginReattachmentConfig.Test, + } +} + +func (prc *PluginReattachConfig) ToHashicorpPluginReattachmentConfig() *plugin.ReattachConfig { + addr := prc.Addr + + return &plugin.ReattachConfig{ + Protocol: plugin.Protocol(prc.Protocol), + ProtocolVersion: prc.ProtocolVersion, + Addr: &addr, + Pid: prc.Pid, + ReattachFunc: nil, + Test: prc.Test, + } +} + +type PluginReattachRequest struct { + Manifest *Manifest + PluginReattachConfig *PluginReattachConfig +} + +func (prr *PluginReattachRequest) IsValid() *AppError { + if prr.Manifest == nil { + return NewAppError("PluginReattachRequest.IsValid", "plugin_reattach_request.is_valid.manifest.app_error", nil, "", http.StatusBadRequest) + } + if prr.PluginReattachConfig == nil { + return NewAppError("PluginReattachRequest.IsValid", "plugin_reattach_request.is_valid.plugin_reattach_config.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_status.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_status.go new file mode 100644 index 00000000..63e94c16 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_status.go @@ -0,0 +1,27 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +const ( + PluginStateNotRunning = 0 + PluginStateStarting = 1 // unused by server + PluginStateRunning = 2 + PluginStateFailedToStart = 3 + PluginStateFailedToStayRunning = 4 + PluginStateStopping = 5 // unused by server +) + +// PluginStatus provides a cluster-aware view of installed plugins. +type PluginStatus struct { + PluginId string `json:"plugin_id"` + ClusterId string `json:"cluster_id"` + PluginPath string `json:"plugin_path"` + State int `json:"state"` + Error string `json:"error"` + Name string `json:"name"` + Description string `json:"description"` + Version string `json:"version"` +} + +type PluginStatuses []*PluginStatus diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugin_valid.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_valid.go new file mode 100644 index 00000000..6c772e58 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugin_valid.go @@ -0,0 +1,40 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "regexp" + "unicode/utf8" +) + +const ( + MinIdLength = 3 + MaxIdLength = 190 + ValidIdRegex = `^[a-zA-Z0-9-_\.]+$` +) + +// ValidId constrains the set of valid plugin identifiers: +// +// ^[a-zA-Z0-9-_\.]+ +var validId *regexp.Regexp + +func init() { + validId = regexp.MustCompile(ValidIdRegex) +} + +// IsValidPluginId verifies that the plugin id has a minimum length of 3, maximum length of 190, and +// contains only alphanumeric characters, dashes, underscores and periods. +// +// These constraints are necessary since the plugin id is used as part of a filesystem path. 
+func IsValidPluginId(id string) bool { + if utf8.RuneCountInString(id) < MinIdLength { + return false + } + + if utf8.RuneCountInString(id) > MaxIdLength { + return false + } + + return validId.MatchString(id) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/plugins_response.go b/vendor/github.com/mattermost/mattermost/server/public/model/plugins_response.go new file mode 100644 index 00000000..5aed0b3c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/plugins_response.go @@ -0,0 +1,13 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type PluginInfo struct { + Manifest +} + +type PluginsResponse struct { + Active []*PluginInfo `json:"active"` + Inactive []*PluginInfo `json:"inactive"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/post.go b/vendor/github.com/mattermost/mattermost/server/public/model/post.go new file mode 100644 index 00000000..a5bcf70a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/post.go @@ -0,0 +1,1168 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "maps" + "net/http" + "regexp" + "sort" + "strings" + "sync" + "unicode/utf8" + + "github.com/hashicorp/go-multierror" + "github.com/mattermost/mattermost/server/public/shared/markdown" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +const ( + PostSystemMessagePrefix = "system_" + PostTypeDefault = "" + PostTypeSlackAttachment = "slack_attachment" + PostTypeSystemGeneric = "system_generic" + PostTypeJoinLeave = "system_join_leave" // Deprecated, use PostJoinChannel or PostLeaveChannel instead + PostTypeJoinChannel = "system_join_channel" + PostTypeGuestJoinChannel = "system_guest_join_channel" + PostTypeLeaveChannel = "system_leave_channel" + PostTypeJoinTeam = "system_join_team" + PostTypeLeaveTeam = "system_leave_team" + PostTypeAutoResponder = "system_auto_responder" + PostTypeAddRemove = "system_add_remove" // Deprecated, use PostAddToChannel or PostRemoveFromChannel instead + PostTypeAddToChannel = "system_add_to_channel" + PostTypeAddGuestToChannel = "system_add_guest_to_chan" + PostTypeRemoveFromChannel = "system_remove_from_channel" + PostTypeMoveChannel = "system_move_channel" + PostTypeAddToTeam = "system_add_to_team" + PostTypeRemoveFromTeam = "system_remove_from_team" + PostTypeHeaderChange = "system_header_change" + PostTypeDisplaynameChange = "system_displayname_change" + PostTypeConvertChannel = "system_convert_channel" + PostTypePurposeChange = "system_purpose_change" + PostTypeChannelDeleted = "system_channel_deleted" + PostTypeChannelRestored = "system_channel_restored" + PostTypeEphemeral = "system_ephemeral" + PostTypeChangeChannelPrivacy = "system_change_chan_privacy" + PostTypeWrangler = "system_wrangler" + PostTypeGMConvertedToChannel = "system_gm_to_channel" + PostTypeAddBotTeamsChannels = "add_bot_teams_channels" + PostTypeMe = "me" + PostCustomTypePrefix = "custom_" + PostTypeReminder = "reminder" + + PostFileidsMaxRunes = 300 + 
PostFilenamesMaxRunes = 4000 + PostHashtagsMaxRunes = 1000 + PostMessageMaxRunesV1 = 4000 + PostMessageMaxBytesV2 = 65535 // Maximum size of a TEXT column in MySQL + PostMessageMaxRunesV2 = PostMessageMaxBytesV2 / 4 // Assume a worst-case representation + PostPropsMaxRunes = 800000 + PostPropsMaxUserRunes = PostPropsMaxRunes - 40000 // Leave some room for system / pre-save modifications + + PropsAddChannelMember = "add_channel_member" + + PostPropsAddedUserId = "addedUserId" + PostPropsDeleteBy = "deleteBy" + PostPropsOverrideIconURL = "override_icon_url" + PostPropsOverrideIconEmoji = "override_icon_emoji" + PostPropsOverrideUsername = "override_username" + PostPropsFromWebhook = "from_webhook" + PostPropsFromBot = "from_bot" + PostPropsFromOAuthApp = "from_oauth_app" + PostPropsWebhookDisplayName = "webhook_display_name" + PostPropsAttachments = "attachments" + PostPropsFromPlugin = "from_plugin" + PostPropsMentionHighlightDisabled = "mentionHighlightDisabled" + PostPropsGroupHighlightDisabled = "disable_group_highlight" + PostPropsPreviewedPost = "previewed_post" + PostPropsForceNotification = "force_notification" + PostPropsChannelMentions = "channel_mentions" + PostPropsUnsafeLinks = "unsafe_links" + PostPropsAIGeneratedByUserID = "ai_generated_by" + PostPropsAIGeneratedByUsername = "ai_generated_by_username" + + PostPriorityUrgent = "urgent" +) + +type Post struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + EditAt int64 `json:"edit_at"` + DeleteAt int64 `json:"delete_at"` + IsPinned bool `json:"is_pinned"` + UserId string `json:"user_id"` + ChannelId string `json:"channel_id"` + RootId string `json:"root_id"` + OriginalId string `json:"original_id"` + + Message string `json:"message"` + // MessageSource will contain the message as submitted by the user if Message has been modified + // by Mattermost for presentation (e.g if an image proxy is being used). 
It should be used to + // populate edit boxes if present. + MessageSource string `json:"message_source,omitempty"` + + Type string `json:"type"` + propsMu sync.RWMutex `db:"-"` // Unexported mutex used to guard Post.Props. + Props StringInterface `json:"props"` // Deprecated: use GetProps() + Hashtags string `json:"hashtags"` + Filenames StringArray `json:"-"` // Deprecated, do not use this field any more + FileIds StringArray `json:"file_ids"` + PendingPostId string `json:"pending_post_id"` + HasReactions bool `json:"has_reactions,omitempty"` + RemoteId *string `json:"remote_id,omitempty"` + + // Transient data populated before sending a post to the client + ReplyCount int64 `json:"reply_count"` + LastReplyAt int64 `json:"last_reply_at"` + Participants []*User `json:"participants"` + IsFollowing *bool `json:"is_following,omitempty"` // for root posts in collapsed thread mode indicates if the current user is following this thread + Metadata *PostMetadata `json:"metadata,omitempty"` +} + +func (o *Post) Auditable() map[string]any { + var metaData map[string]any + if o.Metadata != nil { + metaData = o.Metadata.Auditable() + } + + return map[string]any{ + "id": o.Id, + "create_at": o.CreateAt, + "update_at": o.UpdateAt, + "edit_at": o.EditAt, + "delete_at": o.DeleteAt, + "is_pinned": o.IsPinned, + "user_id": o.UserId, + "channel_id": o.ChannelId, + "root_id": o.RootId, + "original_id": o.OriginalId, + "type": o.Type, + "props": o.GetProps(), + "file_ids": o.FileIds, + "pending_post_id": o.PendingPostId, + "remote_id": o.RemoteId, + "reply_count": o.ReplyCount, + "last_reply_at": o.LastReplyAt, + "is_following": o.IsFollowing, + "metadata": metaData, + } +} + +func (o *Post) LogClone() any { + return o.Auditable() +} + +type PostEphemeral struct { + UserID string `json:"user_id"` + Post *Post `json:"post"` +} + +type PostPatch struct { + IsPinned *bool `json:"is_pinned"` + Message *string `json:"message"` + Props *StringInterface `json:"props"` + FileIds *StringArray 
`json:"file_ids"` + HasReactions *bool `json:"has_reactions"` +} + +type PostReminder struct { + TargetTime int64 `json:"target_time"` + // These fields are only used internally for interacting with DB. + PostId string `json:",omitempty"` + UserId string `json:",omitempty"` +} + +type PostPriority struct { + Priority *string `json:"priority"` + RequestedAck *bool `json:"requested_ack"` + PersistentNotifications *bool `json:"persistent_notifications"` + // These fields are only used internally for interacting with DB. + PostId string `json:",omitempty"` + ChannelId string `json:",omitempty"` +} + +type PostPersistentNotifications struct { + PostId string + CreateAt int64 + LastSentAt int64 + DeleteAt int64 + SentCount int16 +} + +type GetPersistentNotificationsPostsParams struct { + MaxTime int64 + MaxSentCount int16 + PerPage int +} + +type MoveThreadParams struct { + ChannelId string `json:"channel_id"` +} + +type SearchParameter struct { + Terms *string `json:"terms"` + IsOrSearch *bool `json:"is_or_search"` + TimeZoneOffset *int `json:"time_zone_offset"` + Page *int `json:"page"` + PerPage *int `json:"per_page"` + IncludeDeletedChannels *bool `json:"include_deleted_channels"` +} + +func (sp SearchParameter) Auditable() map[string]any { + return map[string]any{ + "terms": sp.Terms, + "is_or_search": sp.IsOrSearch, + "time_zone_offset": sp.TimeZoneOffset, + "page": sp.Page, + "per_page": sp.PerPage, + "include_deleted_channels": sp.IncludeDeletedChannels, + } +} + +func (sp SearchParameter) LogClone() any { + return sp.Auditable() +} + +type AnalyticsPostCountsOptions struct { + TeamId string + BotsOnly bool + YesterdayOnly bool +} + +func (o *PostPatch) WithRewrittenImageURLs(f func(string) string) *PostPatch { + pCopy := *o //nolint:revive + if pCopy.Message != nil { + *pCopy.Message = RewriteImageURLs(*o.Message, f) + } + return &pCopy +} + +func (o *PostPatch) Auditable() map[string]any { + return map[string]any{ + "is_pinned": o.IsPinned, + "props": o.Props, 
+ "file_ids": o.FileIds, + "has_reactions": o.HasReactions, + } +} + +type PostForExport struct { + Post + TeamName string + ChannelName string + Username string + ReplyCount int + FlaggedBy StringArray +} + +type DirectPostForExport struct { + Post + User string + ChannelMembers *[]string + FlaggedBy StringArray +} + +type ReplyForExport struct { + Post + Username string + FlaggedBy StringArray +} + +type PostForIndexing struct { + Post + TeamId string `json:"team_id"` + ParentCreateAt *int64 `json:"parent_create_at"` +} + +type FileForIndexing struct { + FileInfo + ChannelId string `json:"channel_id"` + Content string `json:"content"` +} + +// ShouldIndex tells if a file should be indexed or not. +// index files which are- +// a. not deleted +// b. have an associated post ID, if no post ID, then, +// b.i. the file should belong to the channel's bookmarks, as indicated by the "CreatorId" field. +// +// Files not passing these criteria will be deleted from ES index. +// We're deleting those files from ES index instead of simply skipping them while fetching a batch of files +// because existing ES indexes might have these files already indexed, so we need to remove them from index. +func (file *FileForIndexing) ShouldIndex() bool { + // NOTE - this function is used in server as well as Enterprise code. + // Make sure to update public package dependency in both server and Enterprise code when + // updating the logic here and to test both places. + return file != nil && file.DeleteAt == 0 && (file.PostId != "" || file.CreatorId == BookmarkFileOwner) +} + +// ShallowCopy is a utility function to shallow copy a Post to the given +// destination without touching the internal RWMutex. 
+func (o *Post) ShallowCopy(dst *Post) error { + if dst == nil { + return errors.New("dst cannot be nil") + } + o.propsMu.RLock() + defer o.propsMu.RUnlock() + dst.propsMu.Lock() + defer dst.propsMu.Unlock() + dst.Id = o.Id + dst.CreateAt = o.CreateAt + dst.UpdateAt = o.UpdateAt + dst.EditAt = o.EditAt + dst.DeleteAt = o.DeleteAt + dst.IsPinned = o.IsPinned + dst.UserId = o.UserId + dst.ChannelId = o.ChannelId + dst.RootId = o.RootId + dst.OriginalId = o.OriginalId + dst.Message = o.Message + dst.MessageSource = o.MessageSource + dst.Type = o.Type + dst.Props = o.Props + dst.Hashtags = o.Hashtags + dst.Filenames = o.Filenames + dst.FileIds = o.FileIds + dst.PendingPostId = o.PendingPostId + dst.HasReactions = o.HasReactions + dst.ReplyCount = o.ReplyCount + dst.Participants = o.Participants + dst.LastReplyAt = o.LastReplyAt + dst.Metadata = o.Metadata + if o.IsFollowing != nil { + dst.IsFollowing = NewPointer(*o.IsFollowing) + } + dst.RemoteId = o.RemoteId + return nil +} + +// Clone shallowly copies the post and returns the copy. 
+func (o *Post) Clone() *Post { + pCopy := &Post{} //nolint:revive + o.ShallowCopy(pCopy) + return pCopy +} + +func (o *Post) ToJSON() (string, error) { + pCopy := o.Clone() //nolint:revive + pCopy.StripActionIntegrations() + b, err := json.Marshal(pCopy) + return string(b), err +} + +func (o *Post) EncodeJSON(w io.Writer) error { + o.StripActionIntegrations() + return json.NewEncoder(w).Encode(o) +} + +type CreatePostFlags struct { + TriggerWebhooks bool + SetOnline bool + ForceNotification bool +} + +type GetPostsSinceOptions struct { + UserId string + ChannelId string + Time int64 + SkipFetchThreads bool + CollapsedThreads bool + CollapsedThreadsExtended bool + SortAscending bool +} + +type GetPostsSinceForSyncCursor struct { + LastPostUpdateAt int64 + LastPostUpdateID string + LastPostCreateAt int64 + LastPostCreateID string +} + +func (c GetPostsSinceForSyncCursor) IsEmpty() bool { + return c.LastPostCreateAt == 0 && c.LastPostCreateID == "" && c.LastPostUpdateAt == 0 && c.LastPostUpdateID == "" +} + +type GetPostsSinceForSyncOptions struct { + ChannelId string + ExcludeRemoteId string + IncludeDeleted bool + SinceCreateAt bool // determines whether the cursor will be based on CreateAt or UpdateAt + ExcludeChannelMetadataSystemPosts bool // if true, exclude channel metadata system posts (header, display name, purpose changes) +} + +type GetPostsOptions struct { + UserId string + ChannelId string + PostId string + Page int + PerPage int + SkipFetchThreads bool + CollapsedThreads bool + CollapsedThreadsExtended bool + FromPost string // PostId after which to send the items + FromCreateAt int64 // CreateAt after which to send the items + FromUpdateAt int64 // UpdateAt after which to send the items. This cannot be used with FromCreateAt. + Direction string // Only accepts up|down. Indicates the order in which to send the items. + UpdatesOnly bool // This flag is used to make the API work with the updateAt value. 
+ IncludeDeleted bool + IncludePostPriority bool +} + +type PostCountOptions struct { + // Only include posts on a specific team. "" for any team. + TeamId string + MustHaveFile bool + MustHaveHashtag bool + ExcludeDeleted bool + ExcludeSystemPosts bool + UsersPostsOnly bool + // AllowFromCache looks up cache only when ExcludeDeleted and UsersPostsOnly are true and rest are falsy. + AllowFromCache bool + + // retrieves posts in the inclusive range: [SinceUpdateAt + LastPostId, UntilUpdateAt] + SincePostID string + SinceUpdateAt int64 + UntilUpdateAt int64 +} + +func (o *Post) Etag() string { + return Etag(o.Id, o.UpdateAt) +} + +func (o *Post) IsValid(maxPostSize int) *AppError { + if !IsValidId(o.Id) { + return NewAppError("Post.IsValid", "model.post.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("Post.IsValid", "model.post.is_valid.create_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.UpdateAt == 0 { + return NewAppError("Post.IsValid", "model.post.is_valid.update_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !IsValidId(o.UserId) { + return NewAppError("Post.IsValid", "model.post.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.ChannelId) { + return NewAppError("Post.IsValid", "model.post.is_valid.channel_id.app_error", nil, "", http.StatusBadRequest) + } + + if !(IsValidId(o.RootId) || o.RootId == "") { + return NewAppError("Post.IsValid", "model.post.is_valid.root_id.app_error", nil, "", http.StatusBadRequest) + } + + if !(len(o.OriginalId) == 26 || o.OriginalId == "") { + return NewAppError("Post.IsValid", "model.post.is_valid.original_id.app_error", nil, "", http.StatusBadRequest) + } + + if utf8.RuneCountInString(o.Message) > maxPostSize { + return NewAppError("Post.IsValid", "model.post.is_valid.message_length.app_error", + map[string]any{"Length": utf8.RuneCountInString(o.Message), "MaxLength": maxPostSize}, "id="+o.Id, 
http.StatusBadRequest) + } + + if utf8.RuneCountInString(o.Hashtags) > PostHashtagsMaxRunes { + return NewAppError("Post.IsValid", "model.post.is_valid.hashtags.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + switch o.Type { + case + PostTypeDefault, + PostTypeSystemGeneric, + PostTypeJoinLeave, + PostTypeAutoResponder, + PostTypeAddRemove, + PostTypeJoinChannel, + PostTypeGuestJoinChannel, + PostTypeLeaveChannel, + PostTypeJoinTeam, + PostTypeLeaveTeam, + PostTypeAddToChannel, + PostTypeAddGuestToChannel, + PostTypeRemoveFromChannel, + PostTypeMoveChannel, + PostTypeAddToTeam, + PostTypeRemoveFromTeam, + PostTypeSlackAttachment, + PostTypeHeaderChange, + PostTypePurposeChange, + PostTypeDisplaynameChange, + PostTypeConvertChannel, + PostTypeChannelDeleted, + PostTypeChannelRestored, + PostTypeChangeChannelPrivacy, + PostTypeAddBotTeamsChannels, + PostTypeReminder, + PostTypeMe, + PostTypeWrangler, + PostTypeGMConvertedToChannel: + default: + if !strings.HasPrefix(o.Type, PostCustomTypePrefix) { + return NewAppError("Post.IsValid", "model.post.is_valid.type.app_error", nil, "id="+o.Type, http.StatusBadRequest) + } + } + + if utf8.RuneCountInString(ArrayToJSON(o.Filenames)) > PostFilenamesMaxRunes { + return NewAppError("Post.IsValid", "model.post.is_valid.filenames.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if utf8.RuneCountInString(ArrayToJSON(o.FileIds)) > PostFileidsMaxRunes { + return NewAppError("Post.IsValid", "model.post.is_valid.file_ids.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if utf8.RuneCountInString(StringInterfaceToJSON(o.GetProps())) > PostPropsMaxRunes { + return NewAppError("Post.IsValid", "model.post.is_valid.props.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + return nil +} + +func (o *Post) SanitizeProps() { + if o == nil { + return + } + membersToSanitize := []string{ + PropsAddChannelMember, + PostPropsForceNotification, + } + + for _, member := range membersToSanitize { + if _, ok := 
o.GetProps()[member]; ok { + o.DelProp(member) + } + } + for _, p := range o.Participants { + p.Sanitize(map[string]bool{}) + } +} + +// Remove any input data from the post object that is not user controlled +func (o *Post) SanitizeInput() { + o.DeleteAt = 0 + o.RemoteId = NewPointer("") + + if o.Metadata != nil { + o.Metadata.Embeds = nil + } +} + +func (o *Post) ContainsIntegrationsReservedProps() []string { + return ContainsIntegrationsReservedProps(o.GetProps()) +} + +func (o *PostPatch) ContainsIntegrationsReservedProps() []string { + if o == nil || o.Props == nil { + return nil + } + return ContainsIntegrationsReservedProps(*o.Props) +} + +func ContainsIntegrationsReservedProps(props StringInterface) []string { + foundProps := []string{} + + if props != nil { + reservedProps := []string{ + PostPropsFromWebhook, + PostPropsOverrideUsername, + PostPropsWebhookDisplayName, + PostPropsOverrideIconURL, + PostPropsOverrideIconEmoji, + } + + for _, key := range reservedProps { + if _, ok := props[key]; ok { + foundProps = append(foundProps, key) + } + } + } + + return foundProps +} + +func (o *Post) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + o.OriginalId = "" + + if o.CreateAt == 0 { + o.CreateAt = GetMillis() + } + + o.UpdateAt = o.CreateAt + o.PreCommit() +} + +func (o *Post) PreCommit() { + if o.GetProps() == nil { + o.SetProps(make(map[string]any)) + } + + if o.Filenames == nil { + o.Filenames = []string{} + } + + if o.FileIds == nil { + o.FileIds = []string{} + } + + o.GenerateActionIds() + + // There's a rare bug where the client sends up duplicate FileIds so protect against that + o.FileIds = RemoveDuplicateStrings(o.FileIds) +} + +func (o *Post) MakeNonNil() { + if o.GetProps() == nil { + o.SetProps(make(map[string]any)) + } +} + +func (o *Post) DelProp(key string) { + o.propsMu.Lock() + defer o.propsMu.Unlock() + propsCopy := make(map[string]any, len(o.Props)-1) + maps.Copy(propsCopy, o.Props) + delete(propsCopy, key) + o.Props = propsCopy +} + 
+func (o *Post) AddProp(key string, value any) { + o.propsMu.Lock() + defer o.propsMu.Unlock() + propsCopy := make(map[string]any, len(o.Props)+1) + maps.Copy(propsCopy, o.Props) + propsCopy[key] = value + o.Props = propsCopy +} + +func (o *Post) GetProps() StringInterface { + o.propsMu.RLock() + defer o.propsMu.RUnlock() + return o.Props +} + +func (o *Post) SetProps(props StringInterface) { + o.propsMu.Lock() + defer o.propsMu.Unlock() + o.Props = props +} + +func (o *Post) GetProp(key string) any { + o.propsMu.RLock() + defer o.propsMu.RUnlock() + return o.Props[key] +} + +// ValidateProps checks all known props for validity. +// Currently, it logs warnings for invalid props rather than returning an error. +// In a future version, this will be updated to return errors for invalid props. +func (o *Post) ValidateProps(logger mlog.LoggerIFace) { + if err := o.propsIsValid(); err != nil { + logger.Warn( + "Invalid post props. In a future version this will result in an error. Please update your integration to be compliant.", + mlog.String("post_id", o.Id), + mlog.Err(err), + ) + } +} + +func (o *Post) propsIsValid() error { + var multiErr *multierror.Error + + props := o.GetProps() + + // Check basic props validity + if props == nil { + return nil + } + + if props[PostPropsAddedUserId] != nil { + if addedUserID, ok := props[PostPropsAddedUserId].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("added_user_id prop must be a string")) + } else if !IsValidId(addedUserID) { + multiErr = multierror.Append(multiErr, fmt.Errorf("added_user_id prop must be a valid user ID")) + } + } + if props[PostPropsDeleteBy] != nil { + if deleteByID, ok := props[PostPropsDeleteBy].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("delete_by prop must be a string")) + } else if !IsValidId(deleteByID) { + multiErr = multierror.Append(multiErr, fmt.Errorf("delete_by prop must be a valid user ID")) + } + } + + // Validate integration props + if 
props[PostPropsOverrideIconURL] != nil { + if iconURL, ok := props[PostPropsOverrideIconURL].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("override_icon_url prop must be a string")) + } else if iconURL == "" || !IsValidHTTPURL(iconURL) { + multiErr = multierror.Append(multiErr, fmt.Errorf("override_icon_url prop must be a valid URL")) + } + } + if props[PostPropsOverrideIconEmoji] != nil { + if _, ok := props[PostPropsOverrideIconEmoji].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("override_icon_emoji prop must be a string")) + } + } + if props[PostPropsOverrideUsername] != nil { + if _, ok := props[PostPropsOverrideUsername].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("override_username prop must be a string")) + } + } + if props[PostPropsFromWebhook] != nil { + if fromWebhook, ok := props[PostPropsFromWebhook].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_webhook prop must be a string")) + } else if fromWebhook != "true" { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_webhook prop must be \"true\"")) + } + } + if props[PostPropsFromBot] != nil { + if fromBot, ok := props[PostPropsFromBot].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_bot prop must be a string")) + } else if fromBot != "true" { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_bot prop must be \"true\"")) + } + } + if props[PostPropsFromOAuthApp] != nil { + if fromOAuthApp, ok := props[PostPropsFromOAuthApp].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_oauth_app prop must be a string")) + } else if fromOAuthApp != "true" { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_oauth_app prop must be \"true\"")) + } + } + if props[PostPropsFromPlugin] != nil { + if fromPlugin, ok := props[PostPropsFromPlugin].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_plugin prop must be a string")) + } else 
if fromPlugin != "true" { + multiErr = multierror.Append(multiErr, fmt.Errorf("from_plugin prop must be \"true\"")) + } + } + if props[PostPropsUnsafeLinks] != nil { + if unsafeLinks, ok := props[PostPropsUnsafeLinks].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("unsafe_links prop must be a string")) + } else if unsafeLinks != "true" { + multiErr = multierror.Append(multiErr, fmt.Errorf("unsafe_links prop must be \"true\"")) + } + } + if props[PostPropsWebhookDisplayName] != nil { + if _, ok := props[PostPropsWebhookDisplayName].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("webhook_display_name prop must be a string")) + } + } + + if props[PostPropsMentionHighlightDisabled] != nil { + if _, ok := props[PostPropsMentionHighlightDisabled].(bool); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("mention_highlight_disabled prop must be a boolean")) + } + } + if props[PostPropsGroupHighlightDisabled] != nil { + if _, ok := props[PostPropsGroupHighlightDisabled].(bool); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("disable_group_highlight prop must be a boolean")) + } + } + + if props[PostPropsPreviewedPost] != nil { + if previewedPostID, ok := props[PostPropsPreviewedPost].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("previewed_post prop must be a string")) + } else if !IsValidId(previewedPostID) { + multiErr = multierror.Append(multiErr, fmt.Errorf("previewed_post prop must be a valid post ID")) + } + } + + if props[PostPropsForceNotification] != nil { + if _, ok := props[PostPropsForceNotification].(bool); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("force_notification prop must be a boolean")) + } + } + + if props[PostPropsAIGeneratedByUserID] != nil { + if aiGenUserID, ok := props[PostPropsAIGeneratedByUserID].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("ai_generated_by prop must be a string")) + } else if !IsValidId(aiGenUserID) { + 
multiErr = multierror.Append(multiErr, fmt.Errorf("ai_generated_by prop must be a valid user ID")) + } + } + + if props[PostPropsAIGeneratedByUsername] != nil { + if _, ok := props[PostPropsAIGeneratedByUsername].(string); !ok { + multiErr = multierror.Append(multiErr, fmt.Errorf("ai_generated_by_username prop must be a string")) + } + } + + for i, a := range o.Attachments() { + if err := a.IsValid(); err != nil { + multiErr = multierror.Append(multiErr, multierror.Prefix(err, fmt.Sprintf("message attachment at index %d is invalid:", i))) + } + } + + return multiErr.ErrorOrNil() +} + +func (o *Post) IsSystemMessage() bool { + return len(o.Type) >= len(PostSystemMessagePrefix) && o.Type[:len(PostSystemMessagePrefix)] == PostSystemMessagePrefix +} + +// IsRemote returns true if the post originated on a remote cluster. +func (o *Post) IsRemote() bool { + return o.RemoteId != nil && *o.RemoteId != "" +} + +// GetRemoteID safely returns the remoteID or empty string if not remote. +func (o *Post) GetRemoteID() string { + if o.RemoteId != nil { + return *o.RemoteId + } + return "" +} + +func (o *Post) IsJoinLeaveMessage() bool { + return o.Type == PostTypeJoinLeave || + o.Type == PostTypeAddRemove || + o.Type == PostTypeJoinChannel || + o.Type == PostTypeLeaveChannel || + o.Type == PostTypeJoinTeam || + o.Type == PostTypeLeaveTeam || + o.Type == PostTypeAddToChannel || + o.Type == PostTypeRemoveFromChannel || + o.Type == PostTypeAddToTeam || + o.Type == PostTypeRemoveFromTeam +} + +func (o *Post) Patch(patch *PostPatch) { + if patch.IsPinned != nil { + o.IsPinned = *patch.IsPinned + } + + if patch.Message != nil { + o.Message = *patch.Message + } + + if patch.Props != nil { + newProps := *patch.Props + o.SetProps(newProps) + } + + if patch.FileIds != nil { + o.FileIds = *patch.FileIds + } + + if patch.HasReactions != nil { + o.HasReactions = *patch.HasReactions + } +} + +func (o *Post) ChannelMentions() []string { + return ChannelMentions(o.Message) +} + +// 
DisableMentionHighlights disables a posts mention highlighting and returns the first channel mention that was present in the message. +func (o *Post) DisableMentionHighlights() string { + mention, hasMentions := findAtChannelMention(o.Message) + if hasMentions { + o.AddProp(PostPropsMentionHighlightDisabled, true) + } + return mention +} + +// DisableMentionHighlights disables mention highlighting for a post patch if required. +func (o *PostPatch) DisableMentionHighlights() { + if o.Message == nil { + return + } + if _, hasMentions := findAtChannelMention(*o.Message); hasMentions { + if o.Props == nil { + o.Props = &StringInterface{} + } + (*o.Props)[PostPropsMentionHighlightDisabled] = true + } +} + +func findAtChannelMention(message string) (mention string, found bool) { + re := regexp.MustCompile(`(?i)\B@(channel|all|here)\b`) + matched := re.FindStringSubmatch(message) + if found = (len(matched) > 0); found { + mention = strings.ToLower(matched[0]) + } + return +} + +func (o *Post) Attachments() []*SlackAttachment { + if attachments, ok := o.GetProp(PostPropsAttachments).([]*SlackAttachment); ok { + return attachments + } + var ret []*SlackAttachment + if attachments, ok := o.GetProp(PostPropsAttachments).([]any); ok { + for _, attachment := range attachments { + if enc, err := json.Marshal(attachment); err == nil { + var decoded SlackAttachment + if json.Unmarshal(enc, &decoded) == nil { + // Ignoring nil actions + i := 0 + for _, action := range decoded.Actions { + if action != nil { + decoded.Actions[i] = action + i++ + } + } + decoded.Actions = decoded.Actions[:i] + + // Ignoring nil fields + i = 0 + for _, field := range decoded.Fields { + if field != nil { + decoded.Fields[i] = field + i++ + } + } + decoded.Fields = decoded.Fields[:i] + ret = append(ret, &decoded) + } + } + } + } + return ret +} + +func (o *Post) AttachmentsEqual(input *Post) bool { + attachments := o.Attachments() + inputAttachments := input.Attachments() + + if len(attachments) != 
len(inputAttachments) { + return false + } + + for i := range attachments { + if !attachments[i].Equals(inputAttachments[i]) { + return false + } + } + + return true +} + +var markdownDestinationEscaper = strings.NewReplacer( + `\`, `\\`, + `<`, `\<`, + `>`, `\>`, + `(`, `\(`, + `)`, `\)`, +) + +// WithRewrittenImageURLs returns a new shallow copy of the post where the message has been +// rewritten via RewriteImageURLs. +func (o *Post) WithRewrittenImageURLs(f func(string) string) *Post { + pCopy := o.Clone() + pCopy.Message = RewriteImageURLs(o.Message, f) + if pCopy.MessageSource == "" && pCopy.Message != o.Message { + pCopy.MessageSource = o.Message + } + return pCopy +} + +// RewriteImageURLs takes a message and returns a copy that has all of the image URLs replaced +// according to the function f. For each image URL, f will be invoked, and the resulting markdown +// will contain the URL returned by that invocation instead. +// +// Image URLs are destination URLs used in inline images or reference definitions that are used +// anywhere in the input markdown as an image. 
+func RewriteImageURLs(message string, f func(string) string) string { + if !strings.Contains(message, "![") { + return message + } + + var ranges []markdown.Range + + markdown.Inspect(message, func(blockOrInline any) bool { + switch v := blockOrInline.(type) { + case *markdown.ReferenceImage: + ranges = append(ranges, v.ReferenceDefinition.RawDestination) + case *markdown.InlineImage: + ranges = append(ranges, v.RawDestination) + default: + return true + } + return true + }) + + if ranges == nil { + return message + } + + sort.Slice(ranges, func(i, j int) bool { + return ranges[i].Position < ranges[j].Position + }) + + copyRanges := make([]markdown.Range, 0, len(ranges)) + urls := make([]string, 0, len(ranges)) + resultLength := len(message) + + start := 0 + for i, r := range ranges { + switch { + case i == 0: + case r.Position != ranges[i-1].Position: + start = ranges[i-1].End + default: + continue + } + original := message[r.Position:r.End] + replacement := markdownDestinationEscaper.Replace(f(markdown.Unescape(original))) + resultLength += len(replacement) - len(original) + copyRanges = append(copyRanges, markdown.Range{Position: start, End: r.Position}) + urls = append(urls, replacement) + } + + result := make([]byte, resultLength) + + offset := 0 + for i, r := range copyRanges { + offset += copy(result[offset:], message[r.Position:r.End]) + offset += copy(result[offset:], urls[i]) + } + copy(result[offset:], message[ranges[len(ranges)-1].End:]) + + return string(result) +} + +func (o *Post) IsFromOAuthBot() bool { + props := o.GetProps() + return props[PostPropsFromWebhook] == "true" && props[PostPropsOverrideUsername] != "" +} + +func (o *Post) ToNilIfInvalid() *Post { + if o.Id == "" { + return nil + } + return o +} + +func (o *Post) ForPlugin() *Post { + p := o.Clone() + p.Metadata = nil + if p.Type == fmt.Sprintf("%sup_notification", PostCustomTypePrefix) { + p.DelProp("requested_features") + } + return p +} + +func (o *Post) GetPreviewPost() *PreviewPost 
{ + if o.Metadata == nil { + return nil + } + for _, embed := range o.Metadata.Embeds { + if embed != nil && embed.Type == PostEmbedPermalink { + if previewPost, ok := embed.Data.(*PreviewPost); ok { + return previewPost + } + } + } + return nil +} + +func (o *Post) GetPreviewedPostProp() string { + if val, ok := o.GetProp(PostPropsPreviewedPost).(string); ok { + return val + } + return "" +} + +func (o *Post) GetPriority() *PostPriority { + if o.Metadata == nil { + return nil + } + return o.Metadata.Priority +} + +func (o *Post) GetPersistentNotification() *bool { + priority := o.GetPriority() + if priority == nil { + return nil + } + return priority.PersistentNotifications +} + +func (o *Post) GetRequestedAck() *bool { + priority := o.GetPriority() + if priority == nil { + return nil + } + return priority.RequestedAck +} + +func (o *Post) IsUrgent() bool { + postPriority := o.GetPriority() + if postPriority == nil { + return false + } + + if postPriority.Priority == nil { + return false + } + + return *postPriority.Priority == PostPriorityUrgent +} + +func (o *Post) CleanPost() *Post { + o.Id = "" + o.CreateAt = 0 + o.UpdateAt = 0 + o.EditAt = 0 + return o +} + +type UpdatePostOptions struct { + SafeUpdate bool + IsRestorePost bool +} + +func DefaultUpdatePostOptions() *UpdatePostOptions { + return &UpdatePostOptions{ + SafeUpdate: false, + IsRestorePost: false, + } +} + +type PreparePostForClientOpts struct { + IsNewPost bool + IsEditPost bool + IncludePriority bool + RetainContent bool + IncludeDeleted bool +} + +type RewriteAction string + +const ( + RewriteActionCustom RewriteAction = "custom" + RewriteActionShorten RewriteAction = "shorten" + RewriteActionElaborate RewriteAction = "elaborate" + RewriteActionImproveWriting RewriteAction = "improve_writing" + RewriteActionFixSpelling RewriteAction = "fix_spelling" + RewriteActionSimplify RewriteAction = "simplify" + RewriteActionSummarize RewriteAction = "summarize" +) + +type RewriteRequest struct { + AgentID 
string `json:"agent_id"` + Message string `json:"message"` + Action RewriteAction `json:"action"` + CustomPrompt string `json:"custom_prompt,omitempty"` +} + +type RewriteResponse struct { + RewrittenText string `json:"rewritten_text"` +} + +const RewriteSystemPrompt = `You are a JSON API that rewrites text. Your response must be valid JSON only. +Return this exact format: {"rewritten_text":"content"}. +Do not use markdown, code blocks, or any formatting. Start with { and end with }.` diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/post_acknowledgement.go b/vendor/github.com/mattermost/mattermost/server/public/model/post_acknowledgement.go new file mode 100644 index 00000000..3d343cbc --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/post_acknowledgement.go @@ -0,0 +1,43 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import "net/http" + +type PostAcknowledgement struct { + UserId string `json:"user_id"` + PostId string `json:"post_id"` + AcknowledgedAt int64 `json:"acknowledged_at"` + ChannelId string `json:"channel_id"` + RemoteId *string `json:"remote_id,omitempty"` +} + +func (o *PostAcknowledgement) IsValid() *AppError { + if !IsValidId(o.UserId) { + return NewAppError("PostAcknowledgement.IsValid", "model.acknowledgement.is_valid.user_id.app_error", nil, "user_id="+o.UserId, http.StatusBadRequest) + } + + if !IsValidId(o.PostId) { + return NewAppError("PostAcknowledgement.IsValid", "model.acknowledgement.is_valid.post_id.app_error", nil, "post_id="+o.PostId, http.StatusBadRequest) + } + + if !IsValidId(o.ChannelId) { + return NewAppError("PostAcknowledgement.IsValid", "model.acknowledgement.is_valid.channel_id.app_error", nil, "channel_id="+o.ChannelId, http.StatusBadRequest) + } + + return nil +} + +func (o *PostAcknowledgement) GetRemoteID() string { + if o.RemoteId != nil { + return *o.RemoteId + } + return "" +} + 
+func (o *PostAcknowledgement) PreSave() { + if o.AcknowledgedAt == 0 { + o.AcknowledgedAt = GetMillis() + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/post_embed.go b/vendor/github.com/mattermost/mattermost/server/public/model/post_embed.go new file mode 100644 index 00000000..3081f16f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/post_embed.go @@ -0,0 +1,33 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +const ( + PostEmbedImage PostEmbedType = "image" + PostEmbedMessageAttachment PostEmbedType = "message_attachment" + PostEmbedOpengraph PostEmbedType = "opengraph" + PostEmbedLink PostEmbedType = "link" + PostEmbedPermalink PostEmbedType = "permalink" + PostEmbedBoards PostEmbedType = "boards" +) + +type PostEmbedType string + +type PostEmbed struct { + Type PostEmbedType `json:"type"` + + // The URL of the embedded content. Used for image and OpenGraph embeds. + URL string `json:"url,omitempty"` + + // Any additional data for the embedded content. Only used for OpenGraph embeds. + Data any `json:"data,omitempty"` +} + +func (pe *PostEmbed) Auditable() map[string]any { + // filter out embedded content. + return map[string]any{ + "type": pe.Type, + "url": pe.URL, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/post_info.go b/vendor/github.com/mattermost/mattermost/server/public/model/post_info.go new file mode 100644 index 00000000..0a48ae9a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/post_info.go @@ -0,0 +1,15 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type PostInfo struct { + ChannelId string `json:"channel_id"` + ChannelType ChannelType `json:"channel_type"` + ChannelDisplayName string `json:"channel_display_name"` + HasJoinedChannel bool `json:"has_joined_channel"` + TeamId string `json:"team_id"` + TeamType string `json:"team_type"` + TeamDisplayName string `json:"team_display_name"` + HasJoinedTeam bool `json:"has_joined_team"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/post_list.go b/vendor/github.com/mattermost/mattermost/server/public/model/post_list.go new file mode 100644 index 00000000..f5eba60f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/post_list.go @@ -0,0 +1,228 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "io" + "sort" +) + +type PostList struct { + Order []string `json:"order"` + Posts map[string]*Post `json:"posts"` + NextPostId string `json:"next_post_id"` + PrevPostId string `json:"prev_post_id"` + // HasNext indicates whether there are more items to be fetched or not. 
+ HasNext *bool `json:"has_next,omitempty"` + // If there are inaccessible posts, FirstInaccessiblePostTime is the time of the latest inaccessible post + FirstInaccessiblePostTime int64 `json:"first_inaccessible_post_time"` +} + +func NewPostList() *PostList { + return &PostList{ + Order: make([]string, 0), + Posts: make(map[string]*Post), + NextPostId: "", + PrevPostId: "", + } +} + +func (o *PostList) Clone() *PostList { + orderCopy := make([]string, len(o.Order)) + postsCopy := make(map[string]*Post) + copy(orderCopy, o.Order) + for k, v := range o.Posts { + postsCopy[k] = v.Clone() + } + return &PostList{ + Order: orderCopy, + Posts: postsCopy, + NextPostId: o.NextPostId, + PrevPostId: o.PrevPostId, + HasNext: o.HasNext, + FirstInaccessiblePostTime: o.FirstInaccessiblePostTime, + } +} + +func (o *PostList) ForPlugin() *PostList { + plCopy := o.Clone() + for k, p := range plCopy.Posts { + plCopy.Posts[k] = p.ForPlugin() + } + return plCopy +} + +func (o *PostList) ToSlice() []*Post { + var posts []*Post + + if l := len(o.Posts); l > 0 { + posts = make([]*Post, 0, l) + } + + for _, id := range o.Order { + posts = append(posts, o.Posts[id]) + } + return posts +} + +func (o *PostList) WithRewrittenImageURLs(f func(string) string) *PostList { + plCopy := *o + plCopy.Posts = make(map[string]*Post) + for id, post := range o.Posts { + plCopy.Posts[id] = post.WithRewrittenImageURLs(f) + } + return &plCopy +} + +func (o *PostList) StripActionIntegrations() { + posts := o.Posts + o.Posts = make(map[string]*Post) + for id, post := range posts { + pcopy := post.Clone() + pcopy.StripActionIntegrations() + o.Posts[id] = pcopy + } +} + +func (o *PostList) ToJSON() (string, error) { + plCopy := *o + plCopy.StripActionIntegrations() + b, err := json.Marshal(&plCopy) + return string(b), err +} + +func (o *PostList) EncodeJSON(w io.Writer) error { + o.StripActionIntegrations() + return json.NewEncoder(w).Encode(o) +} + +func (o *PostList) MakeNonNil() { + if o.Order == nil { + 
o.Order = make([]string, 0) + } + + if o.Posts == nil { + o.Posts = make(map[string]*Post) + } + + for _, v := range o.Posts { + v.MakeNonNil() + } +} + +func (o *PostList) AddOrder(id string) { + if o.Order == nil { + o.Order = make([]string, 0, 128) + } + + o.Order = append(o.Order, id) +} + +func (o *PostList) AddPost(post *Post) { + if o.Posts == nil { + o.Posts = make(map[string]*Post) + } + + o.Posts[post.Id] = post +} + +func (o *PostList) UniqueOrder() { + keys := make(map[string]bool) + order := []string{} + for _, postId := range o.Order { + if _, value := keys[postId]; !value { + keys[postId] = true + order = append(order, postId) + } + } + + o.Order = order +} + +func (o *PostList) Extend(other *PostList) { + for postId := range other.Posts { + o.AddPost(other.Posts[postId]) + } + + for _, postId := range other.Order { + o.AddOrder(postId) + } + + o.UniqueOrder() +} + +func (o *PostList) SortByCreateAt() { + sort.Slice(o.Order, func(i, j int) bool { + return o.Posts[o.Order[i]].CreateAt > o.Posts[o.Order[j]].CreateAt + }) +} + +func (o *PostList) Etag() string { + id := "0" + var t int64 + + for _, v := range o.Posts { + if v.UpdateAt > t { + t = v.UpdateAt + id = v.Id + } else if v.UpdateAt == t && v.Id > id { + t = v.UpdateAt + id = v.Id + } + } + + orderId := "" + if len(o.Order) > 0 { + orderId = o.Order[0] + } + + return Etag(orderId, id, t) +} + +func (o *PostList) IsChannelId(channelId string) bool { + for _, v := range o.Posts { + if v.ChannelId != channelId { + return false + } + } + + return true +} + +func (o *PostList) BuildWranglerPostList() *WranglerPostList { + wpl := &WranglerPostList{} + + o.UniqueOrder() + o.SortByCreateAt() + posts := o.ToSlice() + + if len(posts) == 0 { + // Something was sorted wrong or an empty PostList was provided. + return wpl + } + + // A separate ID key map to ensure no duplicates. 
+ idKeys := make(map[string]bool) + + for i := range posts { + p := posts[len(posts)-i-1] + + // Add UserID to metadata if it's new. + if _, ok := idKeys[p.UserId]; !ok { + idKeys[p.UserId] = true + wpl.ThreadUserIDs = append(wpl.ThreadUserIDs, p.UserId) + } + + wpl.FileAttachmentCount += int64(len(p.FileIds)) + + wpl.Posts = append(wpl.Posts, p) + } + + // Set metadata for earliest and latest posts + wpl.EarlistPostTimestamp = wpl.RootPost().CreateAt + wpl.LatestPostTimestamp = wpl.Posts[wpl.NumPosts()-1].CreateAt + + return wpl +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/post_metadata.go b/vendor/github.com/mattermost/mattermost/server/public/model/post_metadata.go new file mode 100644 index 00000000..e5ed028c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/post_metadata.go @@ -0,0 +1,108 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "maps" +) + +type PostMetadata struct { + // Embeds holds information required to render content embedded in the post. This includes the OpenGraph metadata + // for links in the post. + Embeds []*PostEmbed `json:"embeds,omitempty"` + + // Emojis holds all custom emojis used in the post or used in reaction to the post. + Emojis []*Emoji `json:"emojis,omitempty"` + + // Files holds information about the file attachments on the post. + Files []*FileInfo `json:"files,omitempty"` + + // Images holds the dimensions of all external images in the post as a map of the image URL to its dimensions. + // This includes image embeds (when the message contains a plaintext link to an image), Markdown images, images + // contained in the OpenGraph metadata, and images contained in message attachments. It does not contain + // the dimensions of any file attachments as those are stored in FileInfos. 
+ Images map[string]*PostImage `json:"images,omitempty"` + + // Reactions holds reactions made to the post. + Reactions []*Reaction `json:"reactions,omitempty"` + + // Priority holds info about priority settings for the post. + Priority *PostPriority `json:"priority,omitempty"` + + // Acknowledgements holds acknowledgements made by users to the post + Acknowledgements []*PostAcknowledgement `json:"acknowledgements,omitempty"` +} + +func (p *PostMetadata) Auditable() map[string]any { + embeds := make([]map[string]any, 0, len(p.Embeds)) + for _, pe := range p.Embeds { + embeds = append(embeds, pe.Auditable()) + } + if len(embeds) == 0 { + embeds = nil + } + + return map[string]any{ + "embeds": embeds, + "emojis": p.Emojis, + "files": p.Files, + "images": p.Images, + "reactions": p.Reactions, + "priority": p.Priority, + "acknowledgements": p.Acknowledgements, + } +} + +type PostImage struct { + Width int `json:"width"` + Height int `json:"height"` + + // Format is the name of the image format as used by image/go such as "png", "gif", or "jpeg". + Format string `json:"format"` + + // FrameCount stores the number of frames in this image, if it is an animated gif. It will be 0 for other formats. 
+ FrameCount int `json:"frame_count"` +} + +// Copy does a deep copy +func (p *PostMetadata) Copy() *PostMetadata { + embedsCopy := make([]*PostEmbed, len(p.Embeds)) + copy(embedsCopy, p.Embeds) + + emojisCopy := make([]*Emoji, len(p.Emojis)) + copy(emojisCopy, p.Emojis) + + filesCopy := make([]*FileInfo, len(p.Files)) + copy(filesCopy, p.Files) + + imagesCopy := map[string]*PostImage{} + maps.Copy(imagesCopy, p.Images) + + reactionsCopy := make([]*Reaction, len(p.Reactions)) + copy(reactionsCopy, p.Reactions) + + acknowledgementsCopy := make([]*PostAcknowledgement, len(p.Acknowledgements)) + copy(acknowledgementsCopy, p.Acknowledgements) + + var postPriorityCopy *PostPriority + if p.Priority != nil { + postPriorityCopy = &PostPriority{ + Priority: p.Priority.Priority, + RequestedAck: p.Priority.RequestedAck, + PersistentNotifications: p.Priority.PersistentNotifications, + PostId: p.Priority.PostId, + ChannelId: p.Priority.ChannelId, + } + } + + return &PostMetadata{ + Embeds: embedsCopy, + Emojis: emojisCopy, + Files: filesCopy, + Images: imagesCopy, + Reactions: reactionsCopy, + Priority: postPriorityCopy, + Acknowledgements: acknowledgementsCopy, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/post_search_results.go b/vendor/github.com/mattermost/mattermost/server/public/model/post_search_results.go new file mode 100644 index 00000000..b9ae4d3b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/post_search_results.go @@ -0,0 +1,56 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "encoding/json" + "io" +) + +type PostSearchMatches map[string][]string + +type PostSearchResults struct { + *PostList + Matches PostSearchMatches `json:"matches"` +} + +func MakePostSearchResults(posts *PostList, matches PostSearchMatches) *PostSearchResults { + return &PostSearchResults{ + posts, + matches, + } +} + +func (o *PostSearchResults) ToJSON() (string, error) { + psCopy := *o + psCopy.PostList.StripActionIntegrations() + b, err := json.Marshal(&psCopy) + return string(b), err +} + +func (o *PostSearchResults) EncodeJSON(w io.Writer) error { + o.PostList.StripActionIntegrations() + return json.NewEncoder(w).Encode(o) +} + +func (o *PostSearchResults) ForPlugin() *PostSearchResults { + plCopy := *o + plCopy.PostList = plCopy.PostList.ForPlugin() + return &plCopy +} + +func (o *PostSearchResults) Auditable() map[string]any { + var numResults int + var hasNext bool + + if o.PostList != nil { + numResults = len(o.PostList.Posts) + hasNext = SafeDereference(o.PostList.HasNext) + } + + return map[string]any{ + "num_results": numResults, + "has_next": hasNext, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/preference.go b/vendor/github.com/mattermost/mattermost/server/public/model/preference.go new file mode 100644 index 00000000..d2ebb010 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/preference.go @@ -0,0 +1,188 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "net/http" + "regexp" + "strconv" + "strings" + "unicode/utf8" +) + +const ( + // The primary key for the preference table is the combination of User.Id, Category, and Name. + + // PreferenceCategoryDirectChannelShow and PreferenceCategoryGroupChannelShow + // are used to store the user's preferences for which channels to show in the sidebar. + // The Name field is the channel ID. 
+ PreferenceCategoryDirectChannelShow = "direct_channel_show" + PreferenceCategoryGroupChannelShow = "group_channel_show" + // PreferenceCategoryTutorialStep is used to store the user's progress in the tutorial. + // The Name field is the user ID again (for whatever reason). + PreferenceCategoryTutorialSteps = "tutorial_step" + // PreferenceCategoryAdvancedSettings has settings for the user's advanced settings. + // The Name field is the setting name. Possible values are: + // - "formatting" + // - "send_on_ctrl_enter" + // - "join_leave" + // - "unread_scroll_position" + // - "sync_drafts" + // - "feature_enabled_markdown_preview" <- deprecated in favor of "formatting" + PreferenceCategoryAdvancedSettings = "advanced_settings" + // PreferenceCategoryFlaggedPost is used to store the user's saved posts. + // The Name field is the post ID. + PreferenceCategoryFlaggedPost = "flagged_post" + // PreferenceCategoryFavoriteChannel is used to store the user's favorite channels to be + // shown in the sidebar. The Name field is the channel ID. + PreferenceCategoryFavoriteChannel = "favorite_channel" + // PreferenceCategorySidebarSettings is used to store the user's sidebar settings. + // The Name field is the setting name. (ie. PreferenceNameShowUnreadSection or PreferenceLimitVisibleDmsGms) + PreferenceCategorySidebarSettings = "sidebar_settings" + // PreferenceCategoryDisplaySettings is used to store the user's various display settings. + // The possible Name fields are: + // - PreferenceNameUseMilitaryTime + // - PreferenceNameCollapseSetting + // - PreferenceNameMessageDisplay + // - PreferenceNameCollapseConsecutive + // - PreferenceNameColorizeUsernames + // - PreferenceNameChannelDisplayMode + // - PreferenceNameNameFormat + PreferenceCategoryDisplaySettings = "display_settings" + // PreferenceCategorySystemNotice is used store system admin notices. + // Possible Name values are not defined here. It can be anything with the notice name. 
+ PreferenceCategorySystemNotice = "system_notice" + // Deprecated: PreferenceCategoryLast is not used anymore. + PreferenceCategoryLast = "last" + // PreferenceCategoryCustomStatus is used to store the user's custom status preferences. + // Possible Name values are: + // - PreferenceNameRecentCustomStatuses + // - PreferenceNameCustomStatusTutorialState + // - PreferenceCustomStatusModalViewed + PreferenceCategoryCustomStatus = "custom_status" + // PreferenceCategoryNotifications is used to store the user's notification settings. + // Possible Name values are: + // - PreferenceNameEmailInterval + PreferenceCategoryNotifications = "notifications" + + // Deprecated: PreferenceRecommendedNextSteps is not used anymore. + // Use PreferenceCategoryRecommendedNextSteps instead. + // PreferenceRecommendedNextSteps is actually a Category. The only possible + // Name vaule is PreferenceRecommendedNextStepsHide for now. + PreferenceRecommendedNextSteps = PreferenceCategoryRecommendedNextSteps + PreferenceCategoryRecommendedNextSteps = "recommended_next_steps" + + // PreferenceCategoryTheme has the name for the team id where theme is set. 
+ PreferenceCategoryTheme = "theme" + + PreferenceNameCollapsedThreadsEnabled = "collapsed_reply_threads" + PreferenceNameChannelDisplayMode = "channel_display_mode" + PreferenceNameCollapseSetting = "collapse_previews" + PreferenceNameMessageDisplay = "message_display" + PreferenceNameCollapseConsecutive = "collapse_consecutive_messages" + PreferenceNameColorizeUsernames = "colorize_usernames" + PreferenceNameNameFormat = "name_format" + PreferenceNameUseMilitaryTime = "use_military_time" + + PreferenceNameShowUnreadSection = "show_unread_section" + PreferenceLimitVisibleDmsGms = "limit_visible_dms_gms" + + PreferenceMaxLimitVisibleDmsGmsValue = 40 + MaxPreferenceValueLength = 20000 + + PreferenceCategoryAuthorizedOAuthApp = "oauth_app" + // the name for oauth_app is the client_id and value is the current scope + + // Deprecated: PreferenceCategoryLastChannel is not used anymore. + PreferenceNameLastChannel = "channel" + // Deprecated: PreferenceCategoryLastTeam is not used anymore. + PreferenceNameLastTeam = "team" + + PreferenceNameRecentCustomStatuses = "recent_custom_statuses" + PreferenceNameCustomStatusTutorialState = "custom_status_tutorial_state" + PreferenceCustomStatusModalViewed = "custom_status_modal_viewed" + + PreferenceNameEmailInterval = "email_interval" + + PreferenceEmailIntervalNoBatchingSeconds = "30" // the "immediate" setting is actually 30s + PreferenceEmailIntervalBatchingSeconds = "900" // fifteen minutes is 900 seconds + PreferenceEmailIntervalImmediately = "immediately" + PreferenceEmailIntervalFifteen = "fifteen" + PreferenceEmailIntervalFifteenAsSeconds = "900" + PreferenceEmailIntervalHour = "hour" + PreferenceEmailIntervalHourAsSeconds = "3600" + PreferenceCloudUserEphemeralInfo = "cloud_user_ephemeral_info" + + PreferenceNameRecommendedNextStepsHide = "hide" +) + +type Preference struct { + UserId string `json:"user_id"` + Category string `json:"category"` + Name string `json:"name"` + Value string `json:"value"` +} + +type 
Preferences []Preference + +func (o *Preference) IsValid() *AppError { + if !IsValidId(o.UserId) { + return NewAppError("Preference.IsValid", "model.preference.is_valid.id.app_error", nil, "user_id="+o.UserId, http.StatusBadRequest) + } + + if o.Category == "" || len(o.Category) > 32 { + return NewAppError("Preference.IsValid", "model.preference.is_valid.category.app_error", nil, "category="+o.Category, http.StatusBadRequest) + } + + if len(o.Name) > 32 { + return NewAppError("Preference.IsValid", "model.preference.is_valid.name.app_error", nil, "name="+o.Name, http.StatusBadRequest) + } + + if utf8.RuneCountInString(o.Value) > MaxPreferenceValueLength { + return NewAppError("Preference.IsValid", "model.preference.is_valid.value.app_error", nil, "value="+o.Value, http.StatusBadRequest) + } + + if o.Category == PreferenceCategoryTheme { + var unused map[string]string + if err := json.NewDecoder(strings.NewReader(o.Value)).Decode(&unused); err != nil { + return NewAppError("Preference.IsValid", "model.preference.is_valid.theme.app_error", nil, "value="+o.Value, http.StatusBadRequest).Wrap(err) + } + } + + if o.Category == PreferenceCategorySidebarSettings && o.Name == PreferenceLimitVisibleDmsGms { + visibleDmsGmsValue, convErr := strconv.Atoi(o.Value) + if convErr != nil || visibleDmsGmsValue < 1 || visibleDmsGmsValue > PreferenceMaxLimitVisibleDmsGmsValue { + return NewAppError("Preference.IsValid", "model.preference.is_valid.limit_visible_dms_gms.app_error", nil, "value="+o.Value, http.StatusBadRequest) + } + } + + return nil +} + +var preUpdateColorPattern = regexp.MustCompile(`^#[0-9a-fA-F]{3}([0-9a-fA-F]{3})?$`) + +func (o *Preference) PreUpdate() { + if o.Category == PreferenceCategoryTheme { + // decode the value of theme (a map of strings to string) and eliminate any invalid values + var props map[string]string + // just continue, the invalid preference value should get caught by IsValid before saving + 
json.NewDecoder(strings.NewReader(o.Value)).Decode(&props) + + // blank out any invalid theme values + for name, value := range props { + if name == "image" || name == "type" || name == "codeTheme" { + continue + } + + if !preUpdateColorPattern.MatchString(value) { + props[name] = "#ffffff" + } + } + + if b, err := json.Marshal(props); err == nil { + o.Value = string(b) + } + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/product_notices.go b/vendor/github.com/mattermost/mattermost/server/public/model/product_notices.go new file mode 100644 index 00000000..d2d81cac --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/product_notices.go @@ -0,0 +1,220 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "io" + + "github.com/pkg/errors" +) + +type ProductNotices []ProductNotice + +func (r *ProductNotices) Marshal() ([]byte, error) { + return json.Marshal(r) +} + +func UnmarshalProductNotices(data []byte) (ProductNotices, error) { + var r ProductNotices + err := json.Unmarshal(data, &r) + return r, err +} + +// List of product notices. Order is important and is used to resolve priorities. +// Each notice will only be show if conditions are met. +type ProductNotice struct { + Conditions Conditions `json:"conditions"` + ID string `json:"id"` // Unique identifier for this notice. Can be a running number. Used for storing 'viewed'; state on the server. 
+ LocalizedMessages map[string]NoticeMessageInternal `json:"localizedMessages"` // Notice message data, organized by locale.; Example:; "localizedMessages": {; "en": { "title": "English", description: "English description"},; "frFR": { "title": "Frances", description: "French description"}; } + Repeatable *bool `json:"repeatable,omitempty"` // Configurable flag if the notice should reappear after it’s seen and dismissed +} + +func (n *ProductNotice) SysAdminOnly() bool { + return n.Conditions.Audience != nil && *n.Conditions.Audience == NoticeAudienceSysadmin +} + +func (n *ProductNotice) TeamAdminOnly() bool { + return n.Conditions.Audience != nil && *n.Conditions.Audience == NoticeAudienceTeamAdmin +} + +type Conditions struct { + Audience *NoticeAudience `json:"audience,omitempty"` + ClientType *NoticeClientType `json:"clientType,omitempty"` // Only show the notice on specific clients. Defaults to 'all' + DesktopVersion []string `json:"desktopVersion,omitempty"` // What desktop client versions does this notice apply to.; Format: semver ranges (https://devhints.io/semver); Example: [">=1.2.3 < ~2.4.x"]; Example: ["= 2020-03-01T00:00:00Z" - show after specified date; "< 2020-03-01T00:00:00Z" - show before the specified date; "> 2020-03-01T00:00:00Z <= 2020-04-01T00:00:00Z" - show only between the specified dates + InstanceType *NoticeInstanceType `json:"instanceType,omitempty"` + MobileVersion []string `json:"mobileVersion,omitempty"` // What mobile client versions does this notice apply to.; Format: semver ranges (https://devhints.io/semver); Example: [">=1.2.3 < ~2.4.x"]; Example: ["=1.2.3 < ~2.4.x"]; Example: [" PropertyFieldNameMaxRunes { + return NewAppError("PropertyFieldPatch.IsValid", "model.property_field.is_valid.app_error", map[string]any{"FieldName": "name", "Reason": "value exceeds maximum length"}, "", http.StatusBadRequest) + } + + if pfp.TargetType != nil && utf8.RuneCountInString(*pfp.TargetType) > PropertyFieldTargetTypeMaxRunes { + return 
NewAppError("PropertyFieldPatch.IsValid", "model.property_field.is_valid.app_error", map[string]any{"FieldName": "target_type", "Reason": "value exceeds maximum length"}, "", http.StatusBadRequest) + } + + if pfp.TargetID != nil && utf8.RuneCountInString(*pfp.TargetID) > PropertyFieldTargetIDMaxRunes { + return NewAppError("PropertyFieldPatch.IsValid", "model.property_field.is_valid.app_error", map[string]any{"FieldName": "target_id", "Reason": "value exceeds maximum length"}, "", http.StatusBadRequest) + } + + if pfp.Type != nil && + *pfp.Type != PropertyFieldTypeText && + *pfp.Type != PropertyFieldTypeSelect && + *pfp.Type != PropertyFieldTypeMultiselect && + *pfp.Type != PropertyFieldTypeDate && + *pfp.Type != PropertyFieldTypeUser && + *pfp.Type != PropertyFieldTypeMultiuser { + return NewAppError("PropertyFieldPatch.IsValid", "model.property_field.is_valid.app_error", map[string]any{"FieldName": "type", "Reason": "unknown value"}, "", http.StatusBadRequest) + } + + return nil +} + +func (pf *PropertyField) Patch(patch *PropertyFieldPatch) { + if patch.Name != nil { + pf.Name = *patch.Name + } + + if patch.Type != nil { + pf.Type = *patch.Type + } + + if patch.Attrs != nil { + pf.Attrs = *patch.Attrs + } + + if patch.TargetID != nil { + pf.TargetID = *patch.TargetID + } + + if patch.TargetType != nil { + pf.TargetType = *patch.TargetType + } +} + +type PropertyFieldSearchCursor struct { + PropertyFieldID string + CreateAt int64 +} + +func (p PropertyFieldSearchCursor) IsEmpty() bool { + return p.PropertyFieldID == "" && p.CreateAt == 0 +} + +func (p PropertyFieldSearchCursor) IsValid() error { + if p.IsEmpty() { + return nil + } + + if p.CreateAt <= 0 { + return errors.New("create at cannot be negative or zero") + } + + if !IsValidId(p.PropertyFieldID) { + return errors.New("property field id is invalid") + } + return nil +} + +type PropertyFieldSearchOpts struct { + GroupID string + TargetType string + TargetIDs []string + SinceUpdateAt int64 // UpdatedAt 
after which to send the items + IncludeDeleted bool + Cursor PropertyFieldSearchCursor + PerPage int +} + +func (pf *PropertyField) GetAttr(key string) any { + return pf.Attrs[key] +} + +const PropertyFieldAttributeOptions = "options" + +type PropertyOption interface { + GetID() string + GetName() string + SetID(id string) + IsValid() error +} + +type PropertyOptions[T PropertyOption] []T + +func NewPropertyOptionsFromFieldAttrs[T PropertyOption](optionsArr any) (PropertyOptions[T], error) { + options := PropertyOptions[T]{} + b, err := json.Marshal(optionsArr) + if err != nil { + return nil, fmt.Errorf("failed to marshal options: %w", err) + } + + err = json.Unmarshal(b, &options) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal options: %w", err) + } + + for i := range options { + if options[i].GetID() == "" { + options[i].SetID(NewId()) + } + } + + return options, nil +} + +func (p PropertyOptions[T]) IsValid() error { + if len(p) == 0 { + return errors.New("options list cannot be empty") + } + + seenNames := make(map[string]struct{}) + for i, option := range p { + if err := option.IsValid(); err != nil { + return fmt.Errorf("invalid option at index %d: %w", i, err) + } + + if _, exists := seenNames[option.GetName()]; exists { + return fmt.Errorf("duplicate option name found at index %d: %s", i, option.GetName()) + } + seenNames[option.GetName()] = struct{}{} + } + + return nil +} + +// PluginPropertyOption provides a simple implementation of PropertyOption for plugins +// using a map[string]string for flexible key-value storage +type PluginPropertyOption struct { + Data map[string]string `json:"data"` +} + +func NewPluginPropertyOption(id, name string) *PluginPropertyOption { + return &PluginPropertyOption{ + Data: map[string]string{ + "id": id, + "name": name, + }, + } +} + +func (p *PluginPropertyOption) GetID() string { + if p.Data == nil { + return "" + } + return p.Data["id"] +} + +func (p *PluginPropertyOption) GetName() string { + if 
p.Data == nil { + return "" + } + return p.Data["name"] +} + +func (p *PluginPropertyOption) SetID(id string) { + if p.Data == nil { + p.Data = make(map[string]string) + } + p.Data["id"] = id +} + +func (p *PluginPropertyOption) IsValid() error { + if p.Data == nil { + return errors.New("data cannot be nil") + } + + id := p.GetID() + if id == "" { + return errors.New("id cannot be empty") + } + + if !IsValidId(id) { + return errors.New("id is not a valid ID") + } + + name := p.GetName() + if name == "" { + return errors.New("name cannot be empty") + } + + return nil +} + +// GetValue retrieves a custom value from the option data +func (p *PluginPropertyOption) GetValue(key string) string { + if p.Data == nil { + return "" + } + return p.Data[key] +} + +// SetValue sets a custom value in the option data +func (p *PluginPropertyOption) SetValue(key, value string) { + if p.Data == nil { + p.Data = make(map[string]string) + } + p.Data[key] = value +} + +// MarshalJSON implements custom JSON marshaling to avoid wrapping in "data" +func (p *PluginPropertyOption) MarshalJSON() ([]byte, error) { + if p.Data == nil { + return json.Marshal(map[string]string{}) + } + return json.Marshal(p.Data) +} + +// UnmarshalJSON implements custom JSON unmarshaling to handle unwrapped JSON +func (p *PluginPropertyOption) UnmarshalJSON(data []byte) error { + var result map[string]string + if err := json.Unmarshal(data, &result); err != nil { + return err + } + p.Data = result + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/property_group.go b/vendor/github.com/mattermost/mattermost/server/public/model/property_group.go new file mode 100644 index 00000000..b8a28fe5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/property_group.go @@ -0,0 +1,15 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type PropertyGroup struct { + ID string + Name string +} + +func (pg *PropertyGroup) PreSave() { + if pg.ID == "" { + pg.ID = NewId() + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/property_value.go b/vendor/github.com/mattermost/mattermost/server/public/model/property_value.go new file mode 100644 index 00000000..4ae731f9 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/property_value.go @@ -0,0 +1,119 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "net/http" + "unicode/utf8" + + "github.com/pkg/errors" +) + +const ( + PropertyValueTargetIDMaxRunes = 255 + PropertyValueTargetTypeMaxRunes = 255 + + PropertyValueTargetTypePost = "post" + PropertyValueTargetTypeUser = "user" +) + +type PropertyValue struct { + ID string `json:"id"` + TargetID string `json:"target_id"` + TargetType string `json:"target_type"` + GroupID string `json:"group_id"` + FieldID string `json:"field_id"` + Value json.RawMessage `json:"value"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` +} + +func (pv *PropertyValue) PreSave() { + if pv.ID == "" { + pv.ID = NewId() + } + + if pv.CreateAt == 0 { + pv.CreateAt = GetMillis() + } + pv.UpdateAt = pv.CreateAt +} + +func (pv *PropertyValue) IsValid() error { + if !IsValidId(pv.ID) { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "id", "Reason": "invalid id"}, "", http.StatusBadRequest) + } + + if !IsValidId(pv.TargetID) { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "target_id", "Reason": "invalid id"}, "id="+pv.ID, http.StatusBadRequest) + } + + if pv.TargetType == "" { + return NewAppError("PropertyValue.IsValid", 
"model.property_value.is_valid.app_error", map[string]any{"FieldName": "target_type", "Reason": "value cannot be empty"}, "id="+pv.ID, http.StatusBadRequest) + } + + if utf8.RuneCountInString(pv.TargetType) > PropertyValueTargetTypeMaxRunes { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "target_type", "Reason": "value exceeds maximum length"}, "id="+pv.ID, http.StatusBadRequest) + } + + if utf8.RuneCountInString(pv.TargetID) > PropertyValueTargetIDMaxRunes { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "target_id", "Reason": "value exceeds maximum length"}, "id="+pv.ID, http.StatusBadRequest) + } + + if !IsValidId(pv.GroupID) { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "group_id", "Reason": "invalid id"}, "id="+pv.ID, http.StatusBadRequest) + } + + if !IsValidId(pv.FieldID) { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "field_id", "Reason": "invalid id"}, "id="+pv.ID, http.StatusBadRequest) + } + + if pv.CreateAt == 0 { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "create_at", "Reason": "value cannot be zero"}, "id="+pv.ID, http.StatusBadRequest) + } + + if pv.UpdateAt == 0 { + return NewAppError("PropertyValue.IsValid", "model.property_value.is_valid.app_error", map[string]any{"FieldName": "update_at", "Reason": "value cannot be zero"}, "id="+pv.ID, http.StatusBadRequest) + } + + return nil +} + +type PropertyValueSearchCursor struct { + PropertyValueID string + CreateAt int64 +} + +func (p PropertyValueSearchCursor) IsEmpty() bool { + return p.PropertyValueID == "" && p.CreateAt == 0 +} + +func (p PropertyValueSearchCursor) IsValid() error { + if p.IsEmpty() { + return nil + } + + if p.CreateAt <= 0 { + 
return errors.New("create at cannot be negative or zero") + } + + if !IsValidId(p.PropertyValueID) { + return errors.New("property field id is invalid") + } + return nil +} + +type PropertyValueSearchOpts struct { + GroupID string + TargetType string + TargetIDs []string + FieldID string + SinceUpdateAt int64 // UpdateAt after which to send the items + IncludeDeleted bool + Cursor PropertyValueSearchCursor + PerPage int + Value json.RawMessage +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/push_notification.go b/vendor/github.com/mattermost/mattermost/server/public/model/push_notification.go new file mode 100644 index 00000000..c61daab9 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/push_notification.go @@ -0,0 +1,104 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "strings" +) + +const ( + PushNotifyApple = "apple" + PushNotifyAndroid = "android" + PushNotifyAppleReactNative = "apple_rn" + PushNotifyAndroidReactNative = "android_rn" + + PushTypeMessage = "message" + PushTypeClear = "clear" + PushTypeUpdateBadge = "update_badge" + PushTypeSession = "session" + PushTypeTest = "test" + PushMessageV2 = "v2" + + PushSoundNone = "none" + + // The category is set to handle a set of interactive Actions + // with the push notifications + CategoryCanReply = "CAN_REPLY" + + // Push notification server URLs + // Legacy URLs are DNS aliases that automatically route to the regional endpoints + MHPNSLegacyUS = "https://push.mattermost.com" + MHPNSLegacyDE = "https://hpns-de.mattermost.com" + // Current regional URLs + MHPNSGlobal = "https://global.push.mattermost.com" + MHPNSUS = "https://us.push.mattermost.com" + MHPNSEU = "https://eu.push.mattermost.com" + MHPNSAP = "https://ap.push.mattermost.com" + MHPNS = MHPNSUS // Legacy constant for backwards compatibility + + PushSendPrepare = "Prepared to send" + 
	PushSendSuccess = "Successful"
	PushNotSent     = "Not Sent due to preferences"
	PushReceived    = "Received by device"
)

// PushSubType allows for passing additional message type information
// to mobile clients in a backwards-compatible way
type PushSubType string

// PushSubTypeCalls is used by the Calls plugin
const PushSubTypeCalls PushSubType = "calls"

// PushNotificationAck is the acknowledgement a mobile client sends back after
// receiving a push notification.
type PushNotificationAck struct {
	Id               string `json:"id"`
	ClientReceivedAt int64  `json:"received_at"`
	ClientPlatform   string `json:"platform"`
	NotificationType string `json:"type"`
	PostId           string `json:"post_id,omitempty"`
	IsIdLoaded       bool   `json:"is_id_loaded"`
}

// PushNotification is the payload sent to the push notification service for
// delivery to a device.
type PushNotification struct {
	AckId            string `json:"ack_id"`
	Platform         string `json:"platform"`
	ServerId         string `json:"server_id"`
	DeviceId         string `json:"device_id"`
	PostId           string `json:"post_id"`
	Category         string `json:"category,omitempty"`
	Sound            string `json:"sound,omitempty"`
	Message          string `json:"message,omitempty"`
	Badge            int    `json:"badge,omitempty"`
	// "cont_ava" is the wire name; presumably iOS content-available — TODO confirm.
	ContentAvailable int         `json:"cont_ava,omitempty"`
	TeamId           string      `json:"team_id,omitempty"`
	ChannelId        string      `json:"channel_id,omitempty"`
	RootId           string      `json:"root_id,omitempty"`
	ChannelName      string      `json:"channel_name,omitempty"`
	Type             string      `json:"type,omitempty"`
	SubType          PushSubType `json:"sub_type,omitempty"`
	SenderId         string      `json:"sender_id,omitempty"`
	SenderName       string      `json:"sender_name,omitempty"`
	OverrideUsername string      `json:"override_username,omitempty"`
	OverrideIconURL  string      `json:"override_icon_url,omitempty"`
	FromWebhook      string      `json:"from_webhook,omitempty"`
	Version          string      `json:"version,omitempty"`
	IsCRTEnabled     bool        `json:"is_crt_enabled"`
	IsIdLoaded       bool        `json:"is_id_loaded"`
	// PostType and ChannelType are server-side only (excluded from the JSON payload).
	PostType    string      `json:"-"`
	ChannelType ChannelType `json:"-"`
	Signature   string      `json:"signature"`
}

// DeepCopy returns a copy of the notification. A value copy of the struct is
// sufficient here: every field is a scalar or an (immutable) string.
func (pn *PushNotification) DeepCopy() *PushNotification {
	pnCopy := *pn
	return &pnCopy
}

func (pn *PushNotification)
// Keys and well-known values of the status map exchanged with the push
// notification service.
const (
	PushStatus         = "status"
	PushStatusOk       = "OK"
	PushStatusFail     = "FAIL"
	PushStatusRemove   = "REMOVE"
	PushStatusErrorMsg = "error"
)

// PushResponse is a simple string key/value status payload.
type PushResponse map[string]string

// NewOkPushResponse returns a response whose status is OK.
func NewOkPushResponse() PushResponse {
	return PushResponse{PushStatus: PushStatusOk}
}

// NewRemovePushResponse returns a response whose status is REMOVE.
func NewRemovePushResponse() PushResponse {
	return PushResponse{PushStatus: PushStatusRemove}
}

// NewErrorPushResponse returns a FAIL response carrying the given error message.
func NewErrorPushResponse(message string) PushResponse {
	return PushResponse{
		PushStatus:         PushStatusFail,
		PushStatusErrorMsg: message,
	}
}

// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
+ +package model + +import ( + "net/http" + "regexp" +) + +type Reaction struct { + UserId string `json:"user_id"` + PostId string `json:"post_id"` + EmojiName string `json:"emoji_name"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + RemoteId *string `json:"remote_id"` + ChannelId string `json:"channel_id"` +} + +func (o *Reaction) IsValid() *AppError { + if !IsValidId(o.UserId) { + return NewAppError("Reaction.IsValid", "model.reaction.is_valid.user_id.app_error", nil, "user_id="+o.UserId, http.StatusBadRequest) + } + + if !IsValidId(o.PostId) { + return NewAppError("Reaction.IsValid", "model.reaction.is_valid.post_id.app_error", nil, "post_id="+o.PostId, http.StatusBadRequest) + } + + validName := regexp.MustCompile(`^[a-zA-Z0-9\-\+_]+$`) + + if o.EmojiName == "" || len(o.EmojiName) > EmojiNameMaxLength || !validName.MatchString(o.EmojiName) { + return NewAppError("Reaction.IsValid", "model.reaction.is_valid.emoji_name.app_error", nil, "emoji_name="+o.EmojiName, http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("Reaction.IsValid", "model.reaction.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + + if o.UpdateAt == 0 { + return NewAppError("Reaction.IsValid", "model.reaction.is_valid.update_at.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (o *Reaction) PreSave() { + if o.CreateAt == 0 { + o.CreateAt = GetMillis() + } + o.UpdateAt = GetMillis() + o.DeleteAt = 0 + + if o.RemoteId == nil { + o.RemoteId = NewPointer("") + } +} + +func (o *Reaction) PreUpdate() { + o.UpdateAt = GetMillis() + + if o.RemoteId == nil { + o.RemoteId = NewPointer("") + } +} + +func (o *Reaction) GetRemoteID() string { + if o.RemoteId == nil { + return "" + } + return *o.RemoteId +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/remote_cluster.go b/vendor/github.com/mattermost/mattermost/server/public/model/remote_cluster.go new file 
const (
	RemoteOfflineAfterMillis = 1000 * 60 * 5 // 5 minutes
	RemoteNameMinLength      = 1
	RemoteNameMaxLength      = 64

	SiteURLPending = "pending_"
	SiteURLPlugin  = "plugin_"

	// NOTE: iota counts every ConstSpec since the start of this block, so it
	// is 5 here and these flags occupy bits 5 and 6 — not bits 0 and 1. Do
	// not reorder or split this const block: persisted option values depend
	// on the current bit positions.
	BitflagOptionAutoShareDMs Bitmask = 1 << iota // Any new DM/GM is automatically shared
	BitflagOptionAutoInvited                      // Remote is automatically invited to all shared channels
)

var (
	validRemoteNameChars = regexp.MustCompile(`^[a-zA-Z0-9\.\-\_]+$`)

	ErrOfflineRemote = errors.New("remote is offline")
)

// Bitmask is a set of boolean option flags stored in the bits of a uint32.
type Bitmask uint32

// IsBitSet reports whether any bit of flag is set in the mask.
func (bm *Bitmask) IsBitSet(flag Bitmask) bool {
	// BUG FIX: the previous implementation returned *bm != 0, which answered
	// "is ANY bit set" and ignored the flag argument entirely.
	return *bm&flag != 0
}

// SetBit turns the given flag's bit(s) on.
func (bm *Bitmask) SetBit(flag Bitmask) {
	*bm |= flag
}

// UnsetBit turns the given flag's bit(s) off.
func (bm *Bitmask) UnsetBit(flag Bitmask) {
	*bm &= ^flag
}
	Name                 string  `json:"name"`
	DisplayName          string  `json:"display_name"`
	SiteURL              string  `json:"site_url"`
	DefaultTeamId        string  `json:"default_team_id"`
	CreateAt             int64   `json:"create_at"`
	DeleteAt             int64   `json:"delete_at"`
	LastPingAt           int64   `json:"last_ping_at"`
	LastGlobalUserSyncAt int64   `json:"last_global_user_sync_at"` // Timestamp of last global user sync
	Token                string  `json:"token"`
	RemoteToken          string  `json:"remote_token"`
	Topics               string  `json:"topics"`
	CreatorId            string  `json:"creator_id"`
	PluginID             string  `json:"plugin_id"` // non-empty when sync message are to be delivered via plugin API
	Options              Bitmask `json:"options"`   // bit-flag set of options
}

// Auditable returns the audit-log representation of the remote cluster.
// Note it deliberately omits the credential fields (Token, RemoteToken).
func (rc *RemoteCluster) Auditable() map[string]any {
	return map[string]any{
		"remote_id":                rc.RemoteId,
		"remote_team_id":           rc.RemoteTeamId,
		"name":                     rc.Name,
		"display_name":             rc.DisplayName,
		"site_url":                 rc.SiteURL,
		"default_team_id":          rc.DefaultTeamId,
		"create_at":                rc.CreateAt,
		"delete_at":                rc.DeleteAt,
		"last_ping_at":             rc.LastPingAt,
		"last_global_user_sync_at": rc.LastGlobalUserSyncAt,
		"creator_id":               rc.CreatorId,
		"plugin_id":                rc.PluginID,
		"options":                  rc.Options,
	}
}

// PreSave fills in defaults before the record is stored.
func (rc *RemoteCluster) PreSave() {
	if rc.RemoteId == "" {
		if rc.PluginID != "" {
			// Plugin-backed remotes get a deterministic id derived from the
			// plugin id (sha256-based), so the same plugin always maps to
			// the same remote.
			rc.RemoteId = newIDFromBytes([]byte(rc.PluginID))
		} else {
			rc.RemoteId = NewId()
		}
	}

	// DisplayName is defaulted from Name *before* Name is sanitized and
	// lower-cased below, so the display name keeps the original casing.
	if rc.DisplayName == "" {
		rc.DisplayName = rc.Name
	}

	rc.Name = SanitizeUnicode(rc.Name)
	rc.DisplayName = SanitizeUnicode(rc.DisplayName)
	rc.Name = NormalizeRemoteName(rc.Name)

	if rc.Token == "" {
		rc.Token = NewId()
	}

	if rc.CreateAt == 0 {
		rc.CreateAt = GetMillis()
	}
	rc.fixTopics()
}

// IsValid returns nil when the record is well-formed, otherwise a 400 AppError.
func (rc *RemoteCluster) IsValid() *AppError {
	if !IsValidId(rc.RemoteId) {
		return NewAppError("RemoteCluster.IsValid", "model.cluster.is_valid.id.app_error", nil, "id="+rc.RemoteId, http.StatusBadRequest)
	}

	if !IsValidRemoteName(rc.Name) {
		return NewAppError("RemoteCluster.IsValid",
	"model.cluster.is_valid.name.app_error", nil, "name="+rc.Name, http.StatusBadRequest)
	}

	if rc.CreateAt == 0 {
		return NewAppError("RemoteCluster.IsValid", "model.cluster.is_valid.create_at.app_error", nil, "create_at=0", http.StatusBadRequest)
	}

	if !IsValidId(rc.CreatorId) {
		return NewAppError("RemoteCluster.IsValid", "model.cluster.is_valid.id.app_error", nil, "creator_id="+rc.CreatorId, http.StatusBadRequest)
	}

	// DefaultTeamId is optional; only validated when present.
	if rc.DefaultTeamId != "" && !IsValidId(rc.DefaultTeamId) {
		return NewAppError("RemoteCluster.IsValid", "model.cluster.is_valid.id.app_error", nil, "default_team_id="+rc.DefaultTeamId, http.StatusBadRequest)
	}

	return nil
}

// Sanitize clears both credential fields so the record can be shared safely.
func (rc *RemoteCluster) Sanitize() {
	rc.Token = ""
	rc.RemoteToken = ""
}

// RemoteClusterPatch is the set of fields that may be updated via patch.
type RemoteClusterPatch struct {
	DisplayName   *string `json:"display_name"`
	DefaultTeamId *string `json:"default_team_id"`
}

// Auditable returns the audit-log representation of the patch.
func (rcp *RemoteClusterPatch) Auditable() map[string]any {
	return map[string]any{
		"display_name":    rcp.DisplayName,
		"default_team_id": rcp.DefaultTeamId,
	}
}

// Patch applies the non-nil fields of patch to the remote cluster.
func (rc *RemoteCluster) Patch(patch *RemoteClusterPatch) {
	if patch.DisplayName != nil {
		rc.DisplayName = *patch.DisplayName
	}

	if patch.DefaultTeamId != nil {
		rc.DefaultTeamId = *patch.DefaultTeamId
	}
}

// RemoteClusterWithPassword pairs a remote cluster with its invite password.
type RemoteClusterWithPassword struct {
	*RemoteCluster
	Password string `json:"password"`
}

// RemoteClusterWithInvite pairs a remote cluster with its encoded invite.
type RemoteClusterWithInvite struct {
	RemoteCluster *RemoteCluster `json:"remote_cluster"`
	Invite        string         `json:"invite"`
	Password      string         `json:"password,omitempty"`
}

// newIDFromBytes deterministically derives a 26-character id from b by
// sha256-hashing it and base32-encoding the digest.
// NOTE(review): the custom alphabet appears to match the one used by NewId —
// presumably to keep ids uniform; verify against the id helpers.
func newIDFromBytes(b []byte) string {
	hash := sha256.New()
	_, _ = hash.Write(b)
	buf := hash.Sum(nil)

	encoding := base32.NewEncoding("ybndrfg8ejkmcpqxot1uwisza345h769").WithPadding(base32.NoPadding)
	id := encoding.EncodeToString(buf)
	return id[:26]
}

// IsOptionFlagSet reports whether the given option flag is set.
func (rc *RemoteCluster) IsOptionFlagSet(flag Bitmask) bool {
	return rc.Options.IsBitSet(flag)
}

func (rc *RemoteCluster) SetOptionFlag(flag Bitmask) {
rc.Options.SetBit(flag) +} + +func (rc *RemoteCluster) UnsetOptionFlag(flag Bitmask) { + rc.Options.UnsetBit(flag) +} + +func IsValidRemoteName(s string) bool { + if len(s) < RemoteNameMinLength || len(s) > RemoteNameMaxLength { + return false + } + return validRemoteNameChars.MatchString(s) +} + +func (rc *RemoteCluster) PreUpdate() { + if rc.DisplayName == "" { + rc.DisplayName = rc.Name + } + + rc.Name = SanitizeUnicode(rc.Name) + rc.DisplayName = SanitizeUnicode(rc.DisplayName) + rc.Name = NormalizeRemoteName(rc.Name) + rc.fixTopics() +} + +func (rc *RemoteCluster) IsOnline() bool { + return rc.LastPingAt > GetMillis()-RemoteOfflineAfterMillis +} + +func (rc *RemoteCluster) IsConfirmed() bool { + if rc.IsPlugin() { + return true // local plugins are automatically confirmed + } + + if rc.SiteURL != "" && !strings.HasPrefix(rc.SiteURL, SiteURLPending) { + return true // empty or pending siteurl are not confirmed + } + return false +} + +func (rc *RemoteCluster) IsPlugin() bool { + if rc.PluginID != "" || strings.HasPrefix(rc.SiteURL, SiteURLPlugin) { + return true // local plugins are automatically confirmed + } + return false +} + +func (rc *RemoteCluster) GetSiteURL() string { + siteURL := rc.SiteURL + if strings.HasPrefix(siteURL, SiteURLPending) { + siteURL = "..." + } + if strings.HasPrefix(siteURL, SiteURLPending) || strings.HasPrefix(siteURL, SiteURLPlugin) { + siteURL = "plugin" + } + return siteURL +} + +// fixTopics ensures all topics are separated by one, and only one, space. 
+func (rc *RemoteCluster) fixTopics() { + trimmed := strings.TrimSpace(rc.Topics) + if trimmed == "" || trimmed == "*" { + rc.Topics = trimmed + return + } + + var sb strings.Builder + sb.WriteString(" ") + + ss := strings.SplitSeq(rc.Topics, " ") + for c := range ss { + cc := strings.TrimSpace(c) + if cc != "" { + sb.WriteString(cc) + sb.WriteString(" ") + } + } + rc.Topics = sb.String() +} + +func (rc *RemoteCluster) ToRemoteClusterInfo() RemoteClusterInfo { + return RemoteClusterInfo{ + Name: rc.Name, + DisplayName: rc.DisplayName, + CreateAt: rc.CreateAt, + DeleteAt: rc.DeleteAt, + LastPingAt: rc.LastPingAt, + } +} + +func NormalizeRemoteName(name string) string { + return strings.ToLower(name) +} + +// RemoteClusterInfo provides a subset of RemoteCluster fields suitable for sending to clients. +type RemoteClusterInfo struct { + Name string `json:"name"` + DisplayName string `json:"display_name"` + CreateAt int64 `json:"create_at"` + DeleteAt int64 `json:"delete_at"` + LastPingAt int64 `json:"last_ping_at"` +} + +// RemoteClusterFrame wraps a `RemoteClusterMsg` with credentials specific to a remote cluster. +type RemoteClusterFrame struct { + RemoteId string `json:"remote_id"` + Msg RemoteClusterMsg `json:"msg"` +} + +func (f *RemoteClusterFrame) Auditable() map[string]any { + return map[string]any{ + "remote_id": f.RemoteId, + "msg_id": f.Msg.Id, + "topic": f.Msg.Topic, + } +} + +func (f *RemoteClusterFrame) IsValid() *AppError { + if !IsValidId(f.RemoteId) { + return NewAppError("RemoteClusterFrame.IsValid", "api.remote_cluster.invalid_id.app_error", nil, "RemoteId="+f.RemoteId, http.StatusBadRequest) + } + + if appErr := f.Msg.IsValid(); appErr != nil { + return appErr + } + + return nil +} + +// RemoteClusterMsg represents a message that is sent and received between clusters. +// These are processed and routed via the RemoteClusters service. 
type RemoteClusterMsg struct {
	Id       string          `json:"id"`
	Topic    string          `json:"topic"`
	CreateAt int64           `json:"create_at"`
	Payload  json.RawMessage `json:"payload"` // opaque, topic-specific JSON body
}

// NewRemoteClusterMsg builds a message with a fresh id and the current
// timestamp for the given topic and payload.
func NewRemoteClusterMsg(topic string, payload json.RawMessage) RemoteClusterMsg {
	return RemoteClusterMsg{
		Id:       NewId(),
		Topic:    topic,
		CreateAt: GetMillis(),
		Payload:  payload,
	}
}

// IsValid returns nil when the message has a valid id, a topic, and a
// non-empty payload; otherwise a 400 AppError.
func (m RemoteClusterMsg) IsValid() *AppError {
	if !IsValidId(m.Id) {
		return NewAppError("RemoteClusterMsg.IsValid", "api.remote_cluster.invalid_id.app_error", nil, "Id="+m.Id, http.StatusBadRequest)
	}

	if m.Topic == "" {
		return NewAppError("RemoteClusterMsg.IsValid", "api.remote_cluster.invalid_topic.app_error", nil, "Topic empty", http.StatusBadRequest)
	}

	if len(m.Payload) == 0 {
		return NewAppError("RemoteClusterMsg.IsValid", "api.context.invalid_body_param.app_error", map[string]any{"Name": "PayLoad"}, "", http.StatusBadRequest)
	}

	return nil
}

// RemoteClusterPing represents a ping that is sent and received between clusters
// to indicate a connection is alive. This is the payload for a `RemoteClusterMsg`.
type RemoteClusterPing struct {
	SentAt int64 `json:"sent_at"`
	RecvAt int64 `json:"recv_at"`
}

// RemoteClusterInvite represents an invitation to establish a simple trust with a remote cluster.
type RemoteClusterInvite struct {
	RemoteId     string `json:"remote_id"`
	RemoteTeamId string `json:"remote_team_id"` // Deprecated: this field is no longer used. It's only kept for backwards compatibility.
+ SiteURL string `json:"site_url"` + Token string `json:"token"` + RefreshedToken string `json:"refreshed_token,omitempty"` // New token generated by the remote cluster when accepting an invitation + Version int `json:"version,omitempty"` +} + +func (rci *RemoteClusterInvite) IsValid() *AppError { + if !IsValidId(rci.RemoteId) { + return NewAppError("RemoteClusterInvite.IsValid", "model.remote_cluster_invite.is_valid.remote_id.app_error", nil, "id="+rci.RemoteId, http.StatusBadRequest) + } + + if rci.Token == "" { + return NewAppError("RemoteClusterInvite.IsValid", "model.remote_cluster_invite.is_valid.token.app_error", nil, "Token empty", http.StatusBadRequest) + } + + if _, err := url.ParseRequestURI(rci.SiteURL); err != nil { + return NewAppError("RemoteClusterInvite.IsValid", "model.remote_cluster_invite.is_valid.site_url.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + + return nil +} + +func (rci *RemoteClusterInvite) Encrypt(password string) ([]byte, error) { + raw, err := json.Marshal(&rci) + if err != nil { + return nil, err + } + + // create random salt to be prepended to the blob. + salt := make([]byte, 16) + if _, err = io.ReadFull(rand.Reader, salt); err != nil { + return nil, err + } + + var key []byte + if rci.Version >= 3 { + // Use PBKDF2 for version 3 and above + key, err = pbkdf2.Key(sha256.New, password, salt, 600000, 32) + if err != nil { + return nil, err + } + } else { + // Use scrypt for older versions + key, err = scrypt.Key([]byte(password), salt, 32768, 8, 1, 32) + if err != nil { + return nil, err + } + } + + block, err := aes.NewCipher(key[:]) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + // create random nonce + nonce := make([]byte, gcm.NonceSize()) + if _, err = io.ReadFull(rand.Reader, nonce); err != nil { + return nil, err + } + + // prefix the nonce to the cyphertext so we don't need to keep track of it. 
+ sealed := gcm.Seal(nonce, nonce, raw, nil) + + return append(salt, sealed...), nil //nolint:makezero +} + +func (rci *RemoteClusterInvite) Decrypt(encrypted []byte, password string) error { + if len(encrypted) <= 16 { + return errors.New("invalid length") + } + + // first 16 bytes is the salt that was used to derive a key + salt := encrypted[:16] + encrypted = encrypted[16:] + + // Try PBKDF2 first (for version 3+) + if err := rci.tryDecrypt(encrypted, password, salt, true); err == nil { + return nil + } + + // Fall back to scrypt (for older versions) + return rci.tryDecrypt(encrypted, password, salt, false) +} + +func (rci *RemoteClusterInvite) tryDecrypt(encrypted []byte, password string, salt []byte, usePBKDF2 bool) error { + var key []byte + var err error + + if usePBKDF2 { + // Use PBKDF2 for version 3 and above + key, err = pbkdf2.Key(sha256.New, password, salt, 600000, 32) + if err != nil { + return err + } + } else { + // Use scrypt for older versions + key, err = scrypt.Key([]byte(password), salt, 32768, 8, 1, 32) + if err != nil { + return err + } + } + + block, err := aes.NewCipher(key[:]) + if err != nil { + return err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return err + } + + // nonce was prefixed to the cyphertext when encrypting so we need to extract it. + nonceSize := gcm.NonceSize() + nonce, cyphertext := encrypted[:nonceSize], encrypted[nonceSize:] + + plain, err := gcm.Open(nil, nonce, cyphertext, nil) + if err != nil { + return err + } + + // try to unmarshall the decrypted JSON to this invite struct. 
+ return json.Unmarshal(plain, &rci) +} + +type RemoteClusterAcceptInvite struct { + Name string `json:"name"` + DisplayName string `json:"display_name"` + DefaultTeamId string `json:"default_team_id"` + Invite string `json:"invite"` + Password string `json:"password"` +} + +// RemoteClusterQueryFilter provides filter criteria for RemoteClusterStore.GetAll +type RemoteClusterQueryFilter struct { + ExcludeOffline bool + InChannel string + NotInChannel string + Topic string + CreatorId string + OnlyConfirmed bool + PluginID string + OnlyPlugins bool + ExcludePlugins bool + RequireOptions Bitmask + IncludeDeleted bool +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/report.go b/vendor/github.com/mattermost/mattermost/server/public/model/report.go new file mode 100644 index 00000000..8304a364 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/report.go @@ -0,0 +1,162 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" + "slices" + "strconv" + "time" +) + +const ( + ReportDurationAllTime = "all_time" + ReportDurationLast30Days = "last_30_days" + ReportDurationPreviousMonth = "previous_month" + ReportDurationLast6Months = "last_6_months" + + ReportingMaxPageSize = 100 +) + +var ( + ReportExportFormats = []string{"csv"} + + UserReportSortColumns = []string{"CreateAt", "Username", "FirstName", "LastName", "Nickname", "Email", "Roles"} +) + +type ReportableObject interface { + ToReport() []string +} + +type ReportingBaseOptions struct { + SortDesc bool + Direction string // Accepts only "prev" or "next" + PageSize int + SortColumn string + FromColumnValue string + FromId string + DateRange string + StartAt int64 + EndAt int64 +} + +func GetReportDateRange(dateRange string, now time.Time) (int64, int64) { + startAt := int64(0) + endAt := int64(0) + + if dateRange == ReportDurationLast30Days { + startAt = now.AddDate(0, 0, -30).UnixMilli() + } else if dateRange == ReportDurationPreviousMonth { + startOfMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, time.Local) + startAt = startOfMonth.AddDate(0, -1, 0).UnixMilli() + endAt = startOfMonth.UnixMilli() + } else if dateRange == ReportDurationLast6Months { + startAt = now.AddDate(0, -6, -0).UnixMilli() + } + + return startAt, endAt +} + +func (options *ReportingBaseOptions) PopulateDateRange(now time.Time) { + startAt, endAt := GetReportDateRange(options.DateRange, now) + + options.StartAt = startAt + options.EndAt = endAt +} + +func (options *ReportingBaseOptions) IsValid() *AppError { + if options.EndAt > 0 && options.StartAt > options.EndAt { + return NewAppError("ReportingBaseOptions.IsValid", "model.reporting_base_options.is_valid.bad_date_range", nil, "", http.StatusBadRequest) + } + + return nil +} + +type UserReportQuery struct { + User + UserPostStats +} + +type UserReport struct { + User + UserPostStats +} + +func (u *UserReport) ToReport() []string { + lastStatusAt := "" + if 
	u.LastStatusAt != nil {
		lastStatusAt = time.UnixMilli(*u.LastStatusAt).String()
	}
	// Optional stats render as "" when absent (nil pointer / zero value).
	lastPostDate := ""
	if u.LastPostDate != nil {
		lastPostDate = time.UnixMilli(*u.LastPostDate).String()
	}
	daysActive := ""
	if u.DaysActive != nil {
		daysActive = strconv.Itoa(*u.DaysActive)
	}
	totalPosts := ""
	if u.TotalPosts != nil {
		totalPosts = strconv.Itoa(*u.TotalPosts)
	}
	lastLogin := ""
	if u.LastLogin > 0 {
		lastLogin = time.UnixMilli(u.LastLogin).String()
	}

	deleteAt := ""
	if u.DeleteAt > 0 {
		deleteAt = time.UnixMilli(u.DeleteAt).String()
	}

	// Column order must stay in sync with the report header produced by the
	// export code — NOTE(review): verify against the CSV writer.
	return []string{
		u.Id,
		u.Username,
		u.Email,
		time.UnixMilli(u.CreateAt).String(),
		u.User.GetDisplayName(ShowNicknameFullName),
		u.Roles,
		lastLogin,
		lastStatusAt,
		lastPostDate,
		daysActive,
		totalPosts,
		deleteAt,
	}
}

// UserReportOptions are the filter options for the user report.
type UserReportOptions struct {
	ReportingBaseOptions
	Role         string
	Team         string
	HasNoTeam    bool
	HideActive   bool
	HideInactive bool
	SearchTerm   string
}

// IsValid validates the base options and the requested sort column.
func (u *UserReportOptions) IsValid() *AppError {
	if appErr := u.ReportingBaseOptions.IsValid(); appErr != nil {
		return appErr
	}

	// Validate against the columns we allow sorting for
	if !slices.Contains(UserReportSortColumns, u.SortColumn) {
		return NewAppError("UserReportOptions.IsValid", "model.user_report_options.is_valid.invalid_sort_column", nil, "", http.StatusBadRequest)
	}

	return nil
}

// ToReport strips non-profile user fields and converts the query row into
// the client-facing report row.
func (u *UserReportQuery) ToReport() *UserReport {
	u.ClearNonProfileFields(true)
	return &UserReport{
		User:          u.User,
		UserPostStats: u.UserPostStats,
	}
}

// IsValidReportExportFormat reports whether format is a supported export format.
func IsValidReportExportFormat(format string) bool {
	return slices.Contains(ReportExportFormats, format)
}

// Copyright (c)
2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "strings" + + "github.com/mattermost/mattermost/server/public/utils/timeutils" +) + +// SysconsoleAncillaryPermissions maps the non-sysconsole permissions required by each sysconsole view. +var SysconsoleAncillaryPermissions map[string][]*Permission +var SystemManagerDefaultPermissions []string +var SystemUserManagerDefaultPermissions []string +var SystemReadOnlyAdminDefaultPermissions []string +var SystemCustomGroupAdminDefaultPermissions []string + +var BuiltInSchemeManagedRoleIDs []string + +var NewSystemRoleIDs []string + +func init() { + NewSystemRoleIDs = []string{ + SystemUserManagerRoleId, + SystemReadOnlyAdminRoleId, + SystemManagerRoleId, + } + + BuiltInSchemeManagedRoleIDs = append([]string{ + SystemGuestRoleId, + SystemUserRoleId, + SystemAdminRoleId, + SystemPostAllRoleId, + SystemPostAllPublicRoleId, + SystemUserAccessTokenRoleId, + + TeamGuestRoleId, + TeamUserRoleId, + TeamAdminRoleId, + TeamPostAllRoleId, + TeamPostAllPublicRoleId, + + ChannelGuestRoleId, + ChannelUserRoleId, + ChannelAdminRoleId, + + CustomGroupUserRoleId, + + PlaybookAdminRoleId, + PlaybookMemberRoleId, + RunAdminRoleId, + RunMemberRoleId, + }, NewSystemRoleIDs...) + + // When updating the values here, the values in mattermost-redux must also be updated. 
+ SysconsoleAncillaryPermissions = map[string][]*Permission{ + PermissionSysconsoleReadAboutEditionAndLicense.Id: { + PermissionReadLicenseInformation, + }, + PermissionSysconsoleWriteAboutEditionAndLicense.Id: { + PermissionManageLicenseInformation, + }, + PermissionSysconsoleReadUserManagementChannels.Id: { + PermissionReadPublicChannel, + PermissionReadChannel, + PermissionReadPublicChannelGroups, + PermissionReadPrivateChannelGroups, + }, + PermissionSysconsoleReadUserManagementUsers.Id: { + PermissionReadOtherUsersTeams, + }, + PermissionSysconsoleReadUserManagementTeams.Id: { + PermissionListPrivateTeams, + PermissionListPublicTeams, + PermissionViewTeam, + }, + PermissionSysconsoleReadEnvironmentElasticsearch.Id: { + PermissionReadElasticsearchPostIndexingJob, + PermissionReadElasticsearchPostAggregationJob, + }, + PermissionSysconsoleWriteEnvironmentWebServer.Id: { + PermissionTestSiteURL, + PermissionReloadConfig, + PermissionInvalidateCaches, + }, + PermissionSysconsoleWriteEnvironmentDatabase.Id: { + PermissionRecycleDatabaseConnections, + }, + PermissionSysconsoleWriteEnvironmentElasticsearch.Id: { + PermissionTestElasticsearch, + PermissionCreateElasticsearchPostIndexingJob, + PermissionManageElasticsearchPostIndexingJob, + PermissionCreateElasticsearchPostAggregationJob, + PermissionManageElasticsearchPostAggregationJob, + PermissionPurgeElasticsearchIndexes, + }, + PermissionSysconsoleWriteEnvironmentFileStorage.Id: { + PermissionTestS3, + }, + PermissionSysconsoleWriteEnvironmentSMTP.Id: { + PermissionTestEmail, + }, + PermissionSysconsoleReadReportingServerLogs.Id: { + PermissionGetLogs, + }, + PermissionSysconsoleReadReportingSiteStatistics.Id: { + PermissionGetAnalytics, + }, + PermissionSysconsoleReadReportingTeamStatistics.Id: { + PermissionGetAnalytics, + }, + PermissionSysconsoleWriteUserManagementUsers.Id: { + PermissionEditOtherUsers, + PermissionDemoteToGuest, + PermissionPromoteGuest, + }, + 
PermissionSysconsoleWriteUserManagementChannels.Id: { + PermissionManagePublicChannelProperties, + PermissionManagePrivateChannelProperties, + PermissionManagePrivateChannelMembers, + PermissionManagePublicChannelMembers, + PermissionDeletePrivateChannel, + PermissionDeletePublicChannel, + PermissionManageChannelRoles, + PermissionConvertPublicChannelToPrivate, + PermissionConvertPrivateChannelToPublic, + }, + PermissionSysconsoleWriteUserManagementTeams.Id: { + PermissionManageTeam, + PermissionManageTeamRoles, + PermissionRemoveUserFromTeam, + PermissionJoinPrivateTeams, + PermissionJoinPublicTeams, + PermissionAddUserToTeam, + }, + PermissionSysconsoleWriteUserManagementGroups.Id: { + PermissionManagePrivateChannelMembers, + PermissionManagePublicChannelMembers, + PermissionConvertPublicChannelToPrivate, + PermissionConvertPrivateChannelToPublic, + }, + PermissionSysconsoleWriteSiteCustomization.Id: { + PermissionEditBrand, + }, + PermissionSysconsoleWriteComplianceDataRetentionPolicy.Id: { + PermissionCreateDataRetentionJob, + PermissionManageDataRetentionJob, + }, + PermissionSysconsoleReadComplianceDataRetentionPolicy.Id: { + PermissionReadDataRetentionJob, + }, + PermissionSysconsoleWriteComplianceComplianceExport.Id: { + PermissionCreateComplianceExportJob, + PermissionManageComplianceExportJob, + PermissionDownloadComplianceExportResult, + }, + PermissionSysconsoleReadComplianceComplianceExport.Id: { + PermissionReadComplianceExportJob, + PermissionDownloadComplianceExportResult, + }, + PermissionSysconsoleReadComplianceComplianceMonitoring.Id: { + PermissionReadAudits, + }, + PermissionSysconsoleWriteAuthenticationLdap.Id: { + PermissionCreateLdapSyncJob, + PermissionManageLdapSyncJob, + PermissionAddLdapPublicCert, + PermissionRemoveLdapPublicCert, + PermissionAddLdapPrivateCert, + PermissionRemoveLdapPrivateCert, + }, + PermissionSysconsoleReadAuthenticationLdap.Id: { + PermissionTestLdap, + PermissionReadLdapSyncJob, + }, + 
PermissionSysconsoleWriteAuthenticationEmail.Id: { + PermissionInvalidateEmailInvite, + }, + PermissionSysconsoleWriteAuthenticationSaml.Id: { + PermissionGetSamlMetadataFromIdp, + PermissionAddSamlPublicCert, + PermissionAddSamlPrivateCert, + PermissionAddSamlIdpCert, + PermissionRemoveSamlPublicCert, + PermissionRemoveSamlPrivateCert, + PermissionRemoveSamlIdpCert, + PermissionGetSamlCertStatus, + }, + } + + SystemUserManagerDefaultPermissions = []string{ + PermissionSysconsoleReadUserManagementGroups.Id, + PermissionSysconsoleReadUserManagementTeams.Id, + PermissionSysconsoleReadUserManagementChannels.Id, + PermissionSysconsoleReadUserManagementPermissions.Id, + PermissionSysconsoleWriteUserManagementGroups.Id, + PermissionSysconsoleWriteUserManagementTeams.Id, + PermissionSysconsoleWriteUserManagementChannels.Id, + PermissionSysconsoleReadAuthenticationSignup.Id, + PermissionSysconsoleReadAuthenticationEmail.Id, + PermissionSysconsoleReadAuthenticationPassword.Id, + PermissionSysconsoleReadAuthenticationMfa.Id, + PermissionSysconsoleReadAuthenticationLdap.Id, + PermissionSysconsoleReadAuthenticationSaml.Id, + PermissionSysconsoleReadAuthenticationOpenid.Id, + PermissionSysconsoleReadAuthenticationGuestAccess.Id, + } + + SystemReadOnlyAdminDefaultPermissions = []string{ + PermissionSysconsoleReadAboutEditionAndLicense.Id, + PermissionSysconsoleReadReportingSiteStatistics.Id, + PermissionSysconsoleReadReportingTeamStatistics.Id, + PermissionSysconsoleReadReportingServerLogs.Id, + PermissionSysconsoleReadUserManagementUsers.Id, + PermissionSysconsoleReadUserManagementGroups.Id, + PermissionSysconsoleReadUserManagementTeams.Id, + PermissionSysconsoleReadUserManagementChannels.Id, + PermissionSysconsoleReadUserManagementPermissions.Id, + PermissionSysconsoleReadEnvironmentWebServer.Id, + PermissionSysconsoleReadEnvironmentDatabase.Id, + PermissionSysconsoleReadEnvironmentElasticsearch.Id, + PermissionSysconsoleReadEnvironmentFileStorage.Id, + 
PermissionSysconsoleReadEnvironmentImageProxy.Id, + PermissionSysconsoleReadEnvironmentSMTP.Id, + PermissionSysconsoleReadEnvironmentPushNotificationServer.Id, + PermissionSysconsoleReadEnvironmentHighAvailability.Id, + PermissionSysconsoleReadEnvironmentRateLimiting.Id, + PermissionSysconsoleReadEnvironmentLogging.Id, + PermissionSysconsoleReadEnvironmentSessionLengths.Id, + PermissionSysconsoleReadEnvironmentPerformanceMonitoring.Id, + PermissionSysconsoleReadEnvironmentDeveloper.Id, + PermissionSysconsoleReadSiteCustomization.Id, + PermissionSysconsoleReadSiteLocalization.Id, + PermissionSysconsoleReadSiteUsersAndTeams.Id, + PermissionSysconsoleReadSiteNotifications.Id, + PermissionSysconsoleReadSiteAnnouncementBanner.Id, + PermissionSysconsoleReadSiteEmoji.Id, + PermissionSysconsoleReadSitePosts.Id, + PermissionSysconsoleReadSiteFileSharingAndDownloads.Id, + PermissionSysconsoleReadSitePublicLinks.Id, + PermissionSysconsoleReadSiteNotices.Id, + PermissionSysconsoleReadAuthenticationSignup.Id, + PermissionSysconsoleReadAuthenticationEmail.Id, + PermissionSysconsoleReadAuthenticationPassword.Id, + PermissionSysconsoleReadAuthenticationMfa.Id, + PermissionSysconsoleReadAuthenticationLdap.Id, + PermissionSysconsoleReadAuthenticationSaml.Id, + PermissionSysconsoleReadAuthenticationOpenid.Id, + PermissionSysconsoleReadAuthenticationGuestAccess.Id, + PermissionSysconsoleReadPlugins.Id, + PermissionSysconsoleReadIntegrationsIntegrationManagement.Id, + PermissionSysconsoleReadIntegrationsBotAccounts.Id, + PermissionSysconsoleReadIntegrationsGif.Id, + PermissionSysconsoleReadIntegrationsCors.Id, + PermissionSysconsoleReadComplianceDataRetentionPolicy.Id, + PermissionSysconsoleReadComplianceComplianceExport.Id, + PermissionSysconsoleReadComplianceComplianceMonitoring.Id, + PermissionSysconsoleReadComplianceCustomTermsOfService.Id, + PermissionSysconsoleReadExperimentalFeatures.Id, + PermissionSysconsoleReadExperimentalFeatureFlags.Id, + 
PermissionSysconsoleReadProductsBoards.Id, + } + + SystemManagerDefaultPermissions = []string{ + PermissionSysconsoleReadAboutEditionAndLicense.Id, + PermissionSysconsoleReadReportingSiteStatistics.Id, + PermissionSysconsoleReadReportingTeamStatistics.Id, + PermissionSysconsoleReadReportingServerLogs.Id, + PermissionSysconsoleReadUserManagementGroups.Id, + PermissionSysconsoleReadUserManagementTeams.Id, + PermissionSysconsoleReadUserManagementChannels.Id, + PermissionSysconsoleReadUserManagementPermissions.Id, + PermissionSysconsoleWriteUserManagementGroups.Id, + PermissionSysconsoleWriteUserManagementTeams.Id, + PermissionSysconsoleWriteUserManagementChannels.Id, + PermissionSysconsoleWriteUserManagementPermissions.Id, + PermissionSysconsoleReadEnvironmentWebServer.Id, + PermissionSysconsoleReadEnvironmentDatabase.Id, + PermissionSysconsoleReadEnvironmentElasticsearch.Id, + PermissionSysconsoleReadEnvironmentFileStorage.Id, + PermissionSysconsoleReadEnvironmentImageProxy.Id, + PermissionSysconsoleReadEnvironmentSMTP.Id, + PermissionSysconsoleReadEnvironmentPushNotificationServer.Id, + PermissionSysconsoleReadEnvironmentHighAvailability.Id, + PermissionSysconsoleReadEnvironmentRateLimiting.Id, + PermissionSysconsoleReadEnvironmentLogging.Id, + PermissionSysconsoleReadEnvironmentSessionLengths.Id, + PermissionSysconsoleReadEnvironmentPerformanceMonitoring.Id, + PermissionSysconsoleReadEnvironmentDeveloper.Id, + PermissionSysconsoleWriteEnvironmentWebServer.Id, + PermissionSysconsoleWriteEnvironmentDatabase.Id, + PermissionSysconsoleWriteEnvironmentElasticsearch.Id, + PermissionSysconsoleWriteEnvironmentFileStorage.Id, + PermissionSysconsoleWriteEnvironmentImageProxy.Id, + PermissionSysconsoleWriteEnvironmentSMTP.Id, + PermissionSysconsoleWriteEnvironmentPushNotificationServer.Id, + PermissionSysconsoleWriteEnvironmentHighAvailability.Id, + PermissionSysconsoleWriteEnvironmentRateLimiting.Id, + PermissionSysconsoleWriteEnvironmentLogging.Id, + 
PermissionSysconsoleWriteEnvironmentSessionLengths.Id, + PermissionSysconsoleWriteEnvironmentPerformanceMonitoring.Id, + PermissionSysconsoleWriteEnvironmentDeveloper.Id, + PermissionSysconsoleReadSiteCustomization.Id, + PermissionSysconsoleWriteSiteCustomization.Id, + PermissionSysconsoleReadSiteLocalization.Id, + PermissionSysconsoleWriteSiteLocalization.Id, + PermissionSysconsoleReadSiteUsersAndTeams.Id, + PermissionSysconsoleWriteSiteUsersAndTeams.Id, + PermissionSysconsoleReadSiteNotifications.Id, + PermissionSysconsoleWriteSiteNotifications.Id, + PermissionSysconsoleReadSiteAnnouncementBanner.Id, + PermissionSysconsoleWriteSiteAnnouncementBanner.Id, + PermissionSysconsoleReadSiteEmoji.Id, + PermissionSysconsoleWriteSiteEmoji.Id, + PermissionSysconsoleReadSitePosts.Id, + PermissionSysconsoleWriteSitePosts.Id, + PermissionSysconsoleReadSiteFileSharingAndDownloads.Id, + PermissionSysconsoleWriteSiteFileSharingAndDownloads.Id, + PermissionSysconsoleReadSitePublicLinks.Id, + PermissionSysconsoleWriteSitePublicLinks.Id, + PermissionSysconsoleReadSiteNotices.Id, + PermissionSysconsoleWriteSiteNotices.Id, + PermissionSysconsoleReadAuthenticationSignup.Id, + PermissionSysconsoleReadAuthenticationEmail.Id, + PermissionSysconsoleReadAuthenticationPassword.Id, + PermissionSysconsoleReadAuthenticationMfa.Id, + PermissionSysconsoleReadAuthenticationLdap.Id, + PermissionSysconsoleReadAuthenticationSaml.Id, + PermissionSysconsoleReadAuthenticationOpenid.Id, + PermissionSysconsoleReadAuthenticationGuestAccess.Id, + PermissionSysconsoleReadPlugins.Id, + PermissionSysconsoleReadIntegrationsIntegrationManagement.Id, + PermissionSysconsoleReadIntegrationsBotAccounts.Id, + PermissionSysconsoleReadIntegrationsGif.Id, + PermissionSysconsoleReadIntegrationsCors.Id, + PermissionSysconsoleWriteIntegrationsIntegrationManagement.Id, + PermissionSysconsoleWriteIntegrationsBotAccounts.Id, + PermissionSysconsoleWriteIntegrationsGif.Id, + PermissionSysconsoleWriteIntegrationsCors.Id, + 
PermissionSysconsoleReadProductsBoards.Id, + PermissionSysconsoleWriteProductsBoards.Id, + PermissionManageOutgoingOAuthConnections.Id, + } + + SystemCustomGroupAdminDefaultPermissions = []string{ + PermissionCreateCustomGroup.Id, + PermissionEditCustomGroup.Id, + PermissionDeleteCustomGroup.Id, + PermissionRestoreCustomGroup.Id, + PermissionManageCustomGroupMembers.Id, + } + + // Add the ancillary permissions to each system role + SystemUserManagerDefaultPermissions = AddAncillaryPermissions(SystemUserManagerDefaultPermissions) + SystemReadOnlyAdminDefaultPermissions = AddAncillaryPermissions(SystemReadOnlyAdminDefaultPermissions) + SystemManagerDefaultPermissions = AddAncillaryPermissions(SystemManagerDefaultPermissions) + SystemCustomGroupAdminDefaultPermissions = AddAncillaryPermissions(SystemCustomGroupAdminDefaultPermissions) +} + +type RoleType string +type RoleScope string + +const ( + SystemGuestRoleId = "system_guest" + SystemUserRoleId = "system_user" + SystemAdminRoleId = "system_admin" + SystemPostAllRoleId = "system_post_all" + SystemPostAllPublicRoleId = "system_post_all_public" + SystemUserAccessTokenRoleId = "system_user_access_token" + SystemUserManagerRoleId = "system_user_manager" + SystemReadOnlyAdminRoleId = "system_read_only_admin" + SystemManagerRoleId = "system_manager" + SystemCustomGroupAdminRoleId = "system_custom_group_admin" + + TeamGuestRoleId = "team_guest" + TeamUserRoleId = "team_user" + TeamAdminRoleId = "team_admin" + TeamPostAllRoleId = "team_post_all" + TeamPostAllPublicRoleId = "team_post_all_public" + + ChannelGuestRoleId = "channel_guest" + ChannelUserRoleId = "channel_user" + ChannelAdminRoleId = "channel_admin" + + CustomGroupUserRoleId = "custom_group_user" + + PlaybookAdminRoleId = "playbook_admin" + PlaybookMemberRoleId = "playbook_member" + RunAdminRoleId = "run_admin" + RunMemberRoleId = "run_member" + + RoleNameMaxLength = 64 + RoleDisplayNameMaxLength = 128 + RoleDescriptionMaxLength = 1024 + + RoleScopeSystem 
RoleScope = "System" + RoleScopeTeam RoleScope = "Team" + RoleScopeChannel RoleScope = "Channel" + RoleScopeGroup RoleScope = "Group" + + RoleTypeGuest RoleType = "Guest" + RoleTypeUser RoleType = "User" + RoleTypeAdmin RoleType = "Admin" +) + +type Role struct { + Id string `json:"id"` + Name string `json:"name"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + Permissions []string `json:"permissions"` + SchemeManaged bool `json:"scheme_managed"` + BuiltIn bool `json:"built_in"` +} + +func (r *Role) Auditable() map[string]any { + return map[string]any{ + "id": r.Id, + "name": r.Name, + "display_name": r.DisplayName, + "description": r.Description, + "create_at": r.CreateAt, + "update_at": r.UpdateAt, + "delete_at": r.DeleteAt, + "permissions": r.Permissions, + "scheme_managed": r.SchemeManaged, + "built_in": r.BuiltIn, + } +} + +func (r *Role) Sanitize() { + r.DisplayName = FakeSetting + r.Description = FakeSetting +} + +func (r *Role) MarshalYAML() (any, error) { + return struct { + Id string `yaml:"id"` + Name string `yaml:"name"` + DisplayName string `yaml:"display_name"` + Description string `yaml:"description"` + CreateAt string `yaml:"create_at"` + UpdateAt string `yaml:"update_at"` + DeleteAt string `yaml:"delete_at"` + Permissions []string `yaml:"permissions"` + SchemeManaged bool `yaml:"scheme_managed"` + BuiltIn bool `yaml:"built_in"` + }{ + Id: r.Id, + Name: r.Name, + DisplayName: r.DisplayName, + Description: r.Description, + CreateAt: timeutils.FormatMillis(r.CreateAt), + UpdateAt: timeutils.FormatMillis(r.UpdateAt), + DeleteAt: timeutils.FormatMillis(r.DeleteAt), + Permissions: r.Permissions, + SchemeManaged: r.SchemeManaged, + BuiltIn: r.BuiltIn, + }, nil +} + +func (r *Role) UnmarshalYAML(unmarshal func(any) error) error { + out := struct { + Id string `yaml:"id"` + Name string `yaml:"name"` + DisplayName 
string `yaml:"display_name"` + Description string `yaml:"description"` + CreateAt string `yaml:"create_at"` + UpdateAt string `yaml:"update_at"` + DeleteAt string `yaml:"delete_at"` + Permissions []string `yaml:"permissions"` + SchemeManaged bool `yaml:"scheme_managed"` + BuiltIn bool `yaml:"built_in"` + }{} + + err := unmarshal(&out) + if err != nil { + return err + } + + createAt, err := timeutils.ParseFormatedMillis(out.CreateAt) + if err != nil { + return err + } + updateAt, err := timeutils.ParseFormatedMillis(out.UpdateAt) + if err != nil { + return err + } + deleteAt, err := timeutils.ParseFormatedMillis(out.DeleteAt) + if err != nil { + return err + } + + *r = Role{ + Id: out.Id, + Name: out.Name, + DisplayName: out.DisplayName, + Description: out.Description, + CreateAt: createAt, + UpdateAt: updateAt, + DeleteAt: deleteAt, + Permissions: out.Permissions, + SchemeManaged: out.SchemeManaged, + BuiltIn: out.BuiltIn, + } + return nil +} + +type RolePatch struct { + Permissions *[]string `json:"permissions"` +} + +func (r *RolePatch) Auditable() map[string]any { + return map[string]any{ + "permissions": r.Permissions, + } +} + +type RolePermissions struct { + RoleID string + Permissions []string +} + +func (r *Role) Patch(patch *RolePatch) { + if patch.Permissions != nil { + r.Permissions = *patch.Permissions + } +} + +func (r *Role) CreateAt_() float64 { + return float64(r.CreateAt) +} + +func (r *Role) UpdateAt_() float64 { + return float64(r.UpdateAt) +} + +func (r *Role) DeleteAt_() float64 { + return float64(r.DeleteAt) +} + +// MergeChannelHigherScopedPermissions is meant to be invoked on a channel scheme's role and merges the higher-scoped +// channel role's permissions. 
+func (r *Role) MergeChannelHigherScopedPermissions(higherScopedPermissions *RolePermissions) { + mergedPermissions := []string{} + + higherScopedPermissionsMap := asStringBoolMap(higherScopedPermissions.Permissions) + rolePermissionsMap := asStringBoolMap(r.Permissions) + + for _, cp := range AllPermissions { + if cp.Scope != PermissionScopeChannel { + continue + } + + _, presentOnHigherScope := higherScopedPermissionsMap[cp.Id] + + // For the channel admin role always look to the higher scope to determine if the role has their permission. + // The channel admin is a special case because they're not part of the UI to be "channel moderated", only + // channel members and channel guests are. + if higherScopedPermissions.RoleID == ChannelAdminRoleId && presentOnHigherScope { + mergedPermissions = append(mergedPermissions, cp.Id) + continue + } + + _, permissionIsModerated := ChannelModeratedPermissionsMap[cp.Id] + if permissionIsModerated { + _, presentOnRole := rolePermissionsMap[cp.Id] + if presentOnRole && presentOnHigherScope { + mergedPermissions = append(mergedPermissions, cp.Id) + } + } else { + if presentOnHigherScope { + mergedPermissions = append(mergedPermissions, cp.Id) + } + } + } + + r.Permissions = mergedPermissions +} + +// Returns an array of permissions that are in either role.Permissions +// or patch.Permissions, but not both. 
+func PermissionsChangedByPatch(role *Role, patch *RolePatch) []string { + var result []string + + if patch.Permissions == nil { + return result + } + + roleMap := make(map[string]bool) + patchMap := make(map[string]bool) + + for _, permission := range role.Permissions { + roleMap[permission] = true + } + + for _, permission := range *patch.Permissions { + patchMap[permission] = true + } + + for _, permission := range role.Permissions { + if !patchMap[permission] { + result = append(result, permission) + } + } + + for _, permission := range *patch.Permissions { + if !roleMap[permission] { + result = append(result, permission) + } + } + + return result +} + +func ChannelModeratedPermissionsChangedByPatch(role *Role, patch *RolePatch) []string { + var result []string + + if role == nil { + return result + } + + if patch.Permissions == nil { + return result + } + + roleMap := make(map[string]bool) + patchMap := make(map[string]bool) + + for _, permission := range role.Permissions { + if channelModeratedPermissionName, found := ChannelModeratedPermissionsMap[permission]; found { + roleMap[channelModeratedPermissionName] = true + } + } + + for _, permission := range *patch.Permissions { + if channelModeratedPermissionName, found := ChannelModeratedPermissionsMap[permission]; found { + patchMap[channelModeratedPermissionName] = true + } + } + + for permissionKey := range roleMap { + if !patchMap[permissionKey] { + result = append(result, permissionKey) + } + } + + for permissionKey := range patchMap { + if !roleMap[permissionKey] { + result = append(result, permissionKey) + } + } + + return result +} + +func isModeratedBookmarkPermission(permission string) bool { + for _, mbp := range ModeratedBookmarkPermissions { + if mbp.Id == permission { + return true + } + } + return false +} + +// GetChannelModeratedPermissions returns a map of channel moderated permissions that the role has access to +func (r *Role) GetChannelModeratedPermissions(channelType ChannelType) 
map[string]bool { + moderatedPermissions := make(map[string]bool) + for _, permission := range r.Permissions { + if _, found := ChannelModeratedPermissionsMap[permission]; !found { + continue + } + + for moderated, moderatedPermissionValue := range ChannelModeratedPermissionsMap { + // the moderated permission has already been found to be true so skip this iteration + if moderatedPermissions[moderatedPermissionValue] { + continue + } + + if moderated == permission { + // Special case where the channel moderated permission for `manage_members` is different depending + // on whether the channel is private or public + if moderated == PermissionManagePublicChannelMembers.Id || moderated == PermissionManagePrivateChannelMembers.Id { + canManagePublic := channelType == ChannelTypeOpen && moderated == PermissionManagePublicChannelMembers.Id + canManagePrivate := channelType == ChannelTypePrivate && moderated == PermissionManagePrivateChannelMembers.Id + moderatedPermissions[moderatedPermissionValue] = canManagePublic || canManagePrivate + + // Special case where the channel moderated permission for `manage_bookmarks` is different + // depending on whether the channel is private or public. 
+ // + // Only AddBookmark is checked even if the permission includes four (add, delete, edit and + // order) as all of them are enabled or disabled in together + } else if isModeratedBookmarkPermission(moderated) { + canManagePublic := channelType == ChannelTypeOpen && moderated == PermissionAddBookmarkPublicChannel.Id + canManagePrivate := channelType == ChannelTypePrivate && moderated == PermissionAddBookmarkPrivateChannel.Id + moderatedPermissions[moderatedPermissionValue] = canManagePublic || canManagePrivate + } else { + moderatedPermissions[moderatedPermissionValue] = true + } + } + } + } + + return moderatedPermissions +} + +// RolePatchFromChannelModerationsPatch Creates and returns a RolePatch based on a slice of ChannelModerationPatches, roleName is expected to be either "members" or "guests". +func (r *Role) RolePatchFromChannelModerationsPatch(channelModerationsPatch []*ChannelModerationPatch, roleName string) *RolePatch { + permissionsToAddToPatch := make(map[string]bool) + + // Iterate through the list of existing permissions on the role and append permissions that we want to keep. 
+ for _, permission := range r.Permissions { + // Permission is not moderated so dont add it to the patch and skip the channelModerationsPatch + if _, isModerated := ChannelModeratedPermissionsMap[permission]; !isModerated { + continue + } + + permissionEnabled := true + // Check if permission has a matching moderated permission name inside the channel moderation patch + for _, channelModerationPatch := range channelModerationsPatch { + if *channelModerationPatch.Name == ChannelModeratedPermissionsMap[permission] { + // Permission key exists in patch with a value of false so skip over it + if roleName == "members" { + if channelModerationPatch.Roles.Members != nil && !*channelModerationPatch.Roles.Members { + permissionEnabled = false + } + } else if roleName == "guests" { + if channelModerationPatch.Roles.Guests != nil && !*channelModerationPatch.Roles.Guests { + permissionEnabled = false + } + } + } + } + + if permissionEnabled { + permissionsToAddToPatch[permission] = true + } + } + + // Iterate through the patch and add any permissions that dont already exist on the role + for _, channelModerationPatch := range channelModerationsPatch { + for permission, moderatedPermissionName := range ChannelModeratedPermissionsMap { + if roleName == "members" && channelModerationPatch.Roles.Members != nil && *channelModerationPatch.Roles.Members && *channelModerationPatch.Name == moderatedPermissionName { + permissionsToAddToPatch[permission] = true + } + + if roleName == "guests" && channelModerationPatch.Roles.Guests != nil && *channelModerationPatch.Roles.Guests && *channelModerationPatch.Name == moderatedPermissionName { + permissionsToAddToPatch[permission] = true + } + } + } + + patchPermissions := make([]string, 0, len(permissionsToAddToPatch)) + for permission := range permissionsToAddToPatch { + patchPermissions = append(patchPermissions, permission) + } + + return &RolePatch{Permissions: &patchPermissions} +} + +func (r *Role) IsValid() bool { + if !IsValidId(r.Id) 
{ + return false + } + + return r.IsValidWithoutId() +} + +func (r *Role) IsValidWithoutId() bool { + if !IsValidRoleName(r.Name) { + return false + } + + if r.DisplayName == "" || len(r.DisplayName) > RoleDisplayNameMaxLength { + return false + } + + if len(r.Description) > RoleDescriptionMaxLength { + return false + } + + check := func(perms []*Permission, permission string) bool { + for _, p := range perms { + if permission == p.Id { + return true + } + } + return false + } + for _, permission := range r.Permissions { + permissionValidated := check(AllPermissions, permission) || check(DeprecatedPermissions, permission) + if !permissionValidated { + return false + } + } + + return true +} + +func CleanRoleNames(roleNames []string) ([]string, bool) { + var cleanedRoleNames []string + for _, roleName := range roleNames { + if strings.TrimSpace(roleName) == "" { + continue + } + + if !IsValidRoleName(roleName) { + return roleNames, false + } + + cleanedRoleNames = append(cleanedRoleNames, roleName) + } + + return cleanedRoleNames, true +} + +func IsValidRoleName(roleName string) bool { + if roleName == "" || len(roleName) > RoleNameMaxLength { + return false + } + + if strings.TrimLeft(roleName, "abcdefghijklmnopqrstuvwxyz0123456789_") != "" { + return false + } + + return true +} + +func MakeDefaultRoles() map[string]*Role { + roles := make(map[string]*Role) + + roles[CustomGroupUserRoleId] = &Role{ + Name: CustomGroupUserRoleId, + DisplayName: fmt.Sprintf("authentication.roles.%s.name", CustomGroupUserRoleId), + Description: fmt.Sprintf("authentication.roles.%s.description", CustomGroupUserRoleId), + Permissions: []string{}, + } + + roles[ChannelGuestRoleId] = &Role{ + Name: "channel_guest", + DisplayName: "authentication.roles.channel_guest.name", + Description: "authentication.roles.channel_guest.description", + Permissions: []string{ + PermissionReadChannel.Id, + PermissionReadChannelContent.Id, + PermissionAddReaction.Id, + PermissionRemoveReaction.Id, + 
PermissionUploadFile.Id, + PermissionEditPost.Id, + PermissionCreatePost.Id, + PermissionUseChannelMentions.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[ChannelUserRoleId] = &Role{ + Name: "channel_user", + DisplayName: "authentication.roles.channel_user.name", + Description: "authentication.roles.channel_user.description", + Permissions: []string{ + PermissionReadChannel.Id, + PermissionReadChannelContent.Id, + PermissionAddReaction.Id, + PermissionRemoveReaction.Id, + PermissionManagePublicChannelMembers.Id, + PermissionUploadFile.Id, + PermissionGetPublicLink.Id, + PermissionCreatePost.Id, + PermissionUseChannelMentions.Id, + PermissionManagePublicChannelProperties.Id, + PermissionDeletePublicChannel.Id, + PermissionManagePrivateChannelProperties.Id, + PermissionDeletePrivateChannel.Id, + PermissionManagePrivateChannelMembers.Id, + PermissionDeletePost.Id, + PermissionEditPost.Id, + PermissionAddBookmarkPublicChannel.Id, + PermissionEditBookmarkPublicChannel.Id, + PermissionDeleteBookmarkPublicChannel.Id, + PermissionOrderBookmarkPublicChannel.Id, + PermissionAddBookmarkPrivateChannel.Id, + PermissionEditBookmarkPrivateChannel.Id, + PermissionDeleteBookmarkPrivateChannel.Id, + PermissionOrderBookmarkPrivateChannel.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[ChannelAdminRoleId] = &Role{ + Name: "channel_admin", + DisplayName: "authentication.roles.channel_admin.name", + Description: "authentication.roles.channel_admin.description", + Permissions: []string{ + PermissionManageChannelRoles.Id, + PermissionUseGroupMentions.Id, + PermissionAddBookmarkPublicChannel.Id, + PermissionEditBookmarkPublicChannel.Id, + PermissionDeleteBookmarkPublicChannel.Id, + PermissionOrderBookmarkPublicChannel.Id, + PermissionAddBookmarkPrivateChannel.Id, + PermissionEditBookmarkPrivateChannel.Id, + PermissionDeleteBookmarkPrivateChannel.Id, + PermissionOrderBookmarkPrivateChannel.Id, + PermissionManagePublicChannelBanner.Id, + 
PermissionManagePrivateChannelBanner.Id, + PermissionManageChannelAccessRules.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[TeamGuestRoleId] = &Role{ + Name: "team_guest", + DisplayName: "authentication.roles.team_guest.name", + Description: "authentication.roles.team_guest.description", + Permissions: []string{ + PermissionViewTeam.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[TeamUserRoleId] = &Role{ + Name: "team_user", + DisplayName: "authentication.roles.team_user.name", + Description: "authentication.roles.team_user.description", + Permissions: []string{ + PermissionListTeamChannels.Id, + PermissionJoinPublicChannels.Id, + PermissionReadPublicChannel.Id, + PermissionViewTeam.Id, + PermissionCreatePublicChannel.Id, + PermissionCreatePrivateChannel.Id, + PermissionInviteUser.Id, + PermissionAddUserToTeam.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[TeamPostAllRoleId] = &Role{ + Name: "team_post_all", + DisplayName: "authentication.roles.team_post_all.name", + Description: "authentication.roles.team_post_all.description", + Permissions: []string{ + PermissionCreatePost.Id, + PermissionUseChannelMentions.Id, + }, + SchemeManaged: false, + BuiltIn: true, + } + + roles[TeamPostAllPublicRoleId] = &Role{ + Name: "team_post_all_public", + DisplayName: "authentication.roles.team_post_all_public.name", + Description: "authentication.roles.team_post_all_public.description", + Permissions: []string{ + PermissionCreatePostPublic.Id, + PermissionUseChannelMentions.Id, + }, + SchemeManaged: false, + BuiltIn: true, + } + + roles[TeamAdminRoleId] = &Role{ + Name: "team_admin", + DisplayName: "authentication.roles.team_admin.name", + Description: "authentication.roles.team_admin.description", + Permissions: []string{ + PermissionRemoveUserFromTeam.Id, + PermissionManageTeam.Id, + PermissionImportTeam.Id, + PermissionManageTeamRoles.Id, + PermissionManageChannelRoles.Id, + PermissionManageOwnIncomingWebhooks.Id, + 
PermissionManageOthersIncomingWebhooks.Id, + PermissionManageOwnOutgoingWebhooks.Id, + PermissionManageOthersOutgoingWebhooks.Id, + PermissionManageOwnSlashCommands.Id, + PermissionManageOthersSlashCommands.Id, + PermissionBypassIncomingWebhookChannelLock.Id, + PermissionConvertPublicChannelToPrivate.Id, + PermissionConvertPrivateChannelToPublic.Id, + PermissionDeletePost.Id, + PermissionDeleteOthersPosts.Id, + PermissionAddBookmarkPublicChannel.Id, + PermissionEditBookmarkPublicChannel.Id, + PermissionDeleteBookmarkPublicChannel.Id, + PermissionOrderBookmarkPublicChannel.Id, + PermissionAddBookmarkPrivateChannel.Id, + PermissionEditBookmarkPrivateChannel.Id, + PermissionDeleteBookmarkPrivateChannel.Id, + PermissionOrderBookmarkPrivateChannel.Id, + PermissionManagePublicChannelBanner.Id, + PermissionManagePrivateChannelBanner.Id, + PermissionManageChannelAccessRules.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[PlaybookAdminRoleId] = &Role{ + Name: PlaybookAdminRoleId, + DisplayName: "authentication.roles.playbook_admin.name", + Description: "authentication.roles.playbook_admin.description", + Permissions: []string{ + PermissionPublicPlaybookManageMembers.Id, + PermissionPublicPlaybookManageRoles.Id, + PermissionPublicPlaybookManageProperties.Id, + PermissionPrivatePlaybookManageMembers.Id, + PermissionPrivatePlaybookManageRoles.Id, + PermissionPrivatePlaybookManageProperties.Id, + PermissionPublicPlaybookMakePrivate.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[PlaybookMemberRoleId] = &Role{ + Name: PlaybookMemberRoleId, + DisplayName: "authentication.roles.playbook_member.name", + Description: "authentication.roles.playbook_member.description", + Permissions: []string{ + PermissionPublicPlaybookView.Id, + PermissionPublicPlaybookManageMembers.Id, + PermissionPublicPlaybookManageProperties.Id, + PermissionPrivatePlaybookView.Id, + PermissionPrivatePlaybookManageMembers.Id, + PermissionPrivatePlaybookManageProperties.Id, + 
PermissionRunCreate.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[RunAdminRoleId] = &Role{ + Name: RunAdminRoleId, + DisplayName: "authentication.roles.run_admin.name", + Description: "authentication.roles.run_admin.description", + Permissions: []string{ + PermissionRunManageMembers.Id, + PermissionRunManageProperties.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[RunMemberRoleId] = &Role{ + Name: RunMemberRoleId, + DisplayName: "authentication.roles.run_member.name", + Description: "authentication.roles.run_member.description", + Permissions: []string{ + PermissionRunView.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[SystemGuestRoleId] = &Role{ + Name: "system_guest", + DisplayName: "authentication.roles.global_guest.name", + Description: "authentication.roles.global_guest.description", + Permissions: []string{ + PermissionCreateDirectChannel.Id, + PermissionCreateGroupChannel.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[SystemUserRoleId] = &Role{ + Name: "system_user", + DisplayName: "authentication.roles.global_user.name", + Description: "authentication.roles.global_user.description", + Permissions: []string{ + PermissionListPublicTeams.Id, + PermissionJoinPublicTeams.Id, + PermissionCreateDirectChannel.Id, + PermissionCreateGroupChannel.Id, + PermissionViewMembers.Id, + PermissionCreateTeam.Id, + PermissionCreateCustomGroup.Id, + PermissionEditCustomGroup.Id, + PermissionDeleteCustomGroup.Id, + PermissionRestoreCustomGroup.Id, + PermissionManageCustomGroupMembers.Id, + }, + SchemeManaged: true, + BuiltIn: true, + } + + roles[SystemPostAllRoleId] = &Role{ + Name: "system_post_all", + DisplayName: "authentication.roles.system_post_all.name", + Description: "authentication.roles.system_post_all.description", + Permissions: []string{ + PermissionCreatePost.Id, + PermissionUseChannelMentions.Id, + }, + SchemeManaged: false, + BuiltIn: true, + } + + roles[SystemPostAllPublicRoleId] = &Role{ + Name: 
"system_post_all_public", + DisplayName: "authentication.roles.system_post_all_public.name", + Description: "authentication.roles.system_post_all_public.description", + Permissions: []string{ + PermissionCreatePostPublic.Id, + PermissionUseChannelMentions.Id, + }, + SchemeManaged: false, + BuiltIn: true, + } + + roles[SystemUserAccessTokenRoleId] = &Role{ + Name: "system_user_access_token", + DisplayName: "authentication.roles.system_user_access_token.name", + Description: "authentication.roles.system_user_access_token.description", + Permissions: []string{ + PermissionCreateUserAccessToken.Id, + PermissionReadUserAccessToken.Id, + PermissionRevokeUserAccessToken.Id, + }, + SchemeManaged: false, + BuiltIn: true, + } + + roles[SystemUserManagerRoleId] = &Role{ + Name: "system_user_manager", + DisplayName: "authentication.roles.system_user_manager.name", + Description: "authentication.roles.system_user_manager.description", + Permissions: SystemUserManagerDefaultPermissions, + SchemeManaged: false, + BuiltIn: true, + } + + roles[SystemReadOnlyAdminRoleId] = &Role{ + Name: "system_read_only_admin", + DisplayName: "authentication.roles.system_read_only_admin.name", + Description: "authentication.roles.system_read_only_admin.description", + Permissions: SystemReadOnlyAdminDefaultPermissions, + SchemeManaged: false, + BuiltIn: true, + } + + roles[SystemManagerRoleId] = &Role{ + Name: "system_manager", + DisplayName: "authentication.roles.system_manager.name", + Description: "authentication.roles.system_manager.description", + Permissions: SystemManagerDefaultPermissions, + SchemeManaged: false, + BuiltIn: true, + } + + roles[SystemCustomGroupAdminRoleId] = &Role{ + Name: "system_custom_group_admin", + DisplayName: "authentication.roles.system_custom_group_admin.name", + Description: "authentication.roles.system_custom_group_admin.description", + Permissions: SystemCustomGroupAdminDefaultPermissions, + SchemeManaged: false, + BuiltIn: true, + } + + allPermissionIDs := 
[]string{} + for _, permission := range AllPermissions { + allPermissionIDs = append(allPermissionIDs, permission.Id) + } + + roles[SystemAdminRoleId] = &Role{ + Name: "system_admin", + DisplayName: "authentication.roles.global_admin.name", + Description: "authentication.roles.global_admin.description", + // System admins can do anything channel and team admins can do + // plus everything members of teams and channels can do to all teams + // and channels on the system + Permissions: allPermissionIDs, + SchemeManaged: true, + BuiltIn: true, + } + + return roles +} + +func AddAncillaryPermissions(permissions []string) []string { + for _, permission := range permissions { + if ancillaryPermissions, ok := SysconsoleAncillaryPermissions[permission]; ok { + for _, ancillaryPermission := range ancillaryPermissions { + permissions = append(permissions, ancillaryPermission.Id) + } + } + } + return permissions +} + +func asStringBoolMap(list []string) map[string]bool { + listMap := make(map[string]bool, len(list)) + for _, p := range list { + listMap[p] = true + } + return listMap +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/saml.go b/vendor/github.com/mattermost/mattermost/server/public/model/saml.go new file mode 100644 index 00000000..e9e987d8 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/saml.go @@ -0,0 +1,176 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "encoding/xml" + "time" +) + +const ( + UserAuthServiceSaml = "saml" + UserAuthServiceSamlText = "SAML" + UserAuthServiceIsSaml = "isSaml" + UserAuthServiceIsMobile = "isMobile" + UserAuthServiceIsOAuth = "isOAuthUser" +) + +type SamlAuthRequest struct { + Base64AuthRequest string + URL string + RelayState string +} + +type SamlCertificateStatus struct { + IdpCertificateFile bool `json:"idp_certificate_file"` + PrivateKeyFile bool `json:"private_key_file"` + PublicCertificateFile bool `json:"public_certificate_file"` +} + +type SamlMetadataResponse struct { + IdpDescriptorURL string `json:"idp_descriptor_url"` + IdpURL string `json:"idp_url"` + IdpPublicCertificate string `json:"idp_public_certificate"` +} + +type NameIDFormat struct { + XMLName xml.Name + Format string `xml:",attr,omitempty"` + Value string `xml:",innerxml"` +} + +type NameID struct { + NameQualifier string `xml:",attr"` + SPNameQualifier string `xml:",attr"` + Format string `xml:",attr,omitempty"` + SPProvidedID string `xml:",attr"` + Value string `xml:",chardata"` +} + +type AttributeValue struct { + Type string `xml:"http://www.w3.org/2001/XMLSchema-instance type,attr"` + Value string `xml:",chardata"` + NameID *NameID +} + +type Attribute struct { + XMLName xml.Name + FriendlyName string `xml:",attr"` + Name string `xml:",attr"` + NameFormat string `xml:",attr"` + Values []AttributeValue `xml:"AttributeValue"` +} + +type Endpoint struct { + XMLName xml.Name + Binding string `xml:"Binding,attr"` + Location string `xml:"Location,attr"` + ResponseLocation string `xml:"ResponseLocation,attr,omitempty"` +} + +type IndexedEndpoint struct { + XMLName xml.Name + Binding string `xml:"Binding,attr"` + Location string `xml:"Location,attr"` + ResponseLocation *string `xml:"ResponseLocation,attr,omitempty"` + Index int `xml:"index,attr"` + IsDefault *bool `xml:"isDefault,attr"` +} + +type IDPSSODescriptor struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata 
IDPSSODescriptor"` + SSODescriptor + WantAuthnRequestsSigned *bool `xml:",attr"` + + SingleSignOnServices []Endpoint `xml:"SingleSignOnService"` + NameIDMappingServices []Endpoint `xml:"NameIDMappingService"` + AssertionIDRequestServices []Endpoint `xml:"AssertionIDRequestService"` + AttributeProfiles []string `xml:"AttributeProfile"` + Attributes []Attribute `xml:"Attribute"` +} + +type SSODescriptor struct { + XMLName xml.Name + RoleDescriptor + ArtifactResolutionServices []IndexedEndpoint `xml:"ArtifactResolutionService"` + SingleLogoutServices []Endpoint `xml:"SingleLogoutService"` + ManageNameIDServices []Endpoint `xml:"ManageNameIDService"` + NameIDFormats []NameIDFormat `xml:"NameIDFormat"` +} + +type X509Certificate struct { + XMLName xml.Name + Cert string `xml:",innerxml"` +} + +type X509Data struct { + XMLName xml.Name + X509Certificate X509Certificate `xml:"X509Certificate"` +} + +type KeyInfo struct { + XMLName xml.Name + DS string `xml:"xmlns:ds,attr"` + X509Data X509Data `xml:"X509Data"` +} +type EncryptionMethod struct { + Algorithm string `xml:"Algorithm,attr"` +} + +type KeyDescriptor struct { + XMLName xml.Name + Use string `xml:"use,attr,omitempty"` + KeyInfo KeyInfo `xml:"http://www.w3.org/2000/09/xmldsig# KeyInfo,omitempty"` +} + +type RoleDescriptor struct { + XMLName xml.Name + ID string `xml:",attr,omitempty"` + ValidUntil time.Time `xml:"validUntil,attr,omitempty"` + CacheDuration time.Duration `xml:"cacheDuration,attr,omitempty"` + ProtocolSupportEnumeration string `xml:"protocolSupportEnumeration,attr"` + ErrorURL string `xml:"errorURL,attr,omitempty"` + KeyDescriptors []KeyDescriptor `xml:"KeyDescriptor,omitempty"` + Organization *Organization `xml:"Organization,omitempty"` + ContactPersons []ContactPerson `xml:"ContactPerson,omitempty"` +} + +type ContactPerson struct { + XMLName xml.Name + ContactType string `xml:"contactType,attr"` + Company string + GivenName string + SurName string + EmailAddresses []string `xml:"EmailAddress"` + 
TelephoneNumbers []string `xml:"TelephoneNumber"` +} + +type LocalizedName struct { + Lang string `xml:"xml lang,attr"` + Value string `xml:",chardata"` +} + +type LocalizedURI struct { + Lang string `xml:"xml lang,attr"` + Value string `xml:",chardata"` +} + +type Organization struct { + XMLName xml.Name + OrganizationNames []LocalizedName `xml:"OrganizationName"` + OrganizationDisplayNames []LocalizedName `xml:"OrganizationDisplayName"` + OrganizationURLs []LocalizedURI `xml:"OrganizationURL"` +} + +type EntityDescriptor struct { + XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata EntityDescriptor"` + EntityID string `xml:"entityID,attr"` + ID string `xml:",attr,omitempty"` + ValidUntil time.Time `xml:"validUntil,attr,omitempty"` + CacheDuration time.Duration `xml:"cacheDuration,attr,omitempty"` + RoleDescriptors []RoleDescriptor `xml:"RoleDescriptor"` + IDPSSODescriptors []IDPSSODescriptor `xml:"IDPSSODescriptor"` + Organization Organization `xml:"Organization"` + ContactPerson ContactPerson `xml:"ContactPerson"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/scheduled_post.go b/vendor/github.com/mattermost/mattermost/server/public/model/scheduled_post.go new file mode 100644 index 00000000..826a317c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/scheduled_post.go @@ -0,0 +1,172 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" +) + +const ( + ScheduledPostErrorUnknownError = "unknown" + ScheduledPostErrorCodeChannelArchived = "channel_archived" + ScheduledPostErrorCodeRestrictedDM = "restricted_dm" + ScheduledPostErrorCodeChannelNotFound = "channel_not_found" + ScheduledPostErrorCodeUserDoesNotExist = "user_missing" + ScheduledPostErrorCodeUserDeleted = "user_deleted" + ScheduledPostErrorCodeNoChannelPermission = "no_channel_permission" + ScheduledPostErrorNoChannelMember = "no_channel_member" + ScheduledPostErrorThreadDeleted = "thread_deleted" + ScheduledPostErrorUnableToSend = "unable_to_send" + ScheduledPostErrorInvalidPost = "invalid_post" +) + +// allow scheduled posts to be created up to +// this much time in the past. While this ir primarily added for reliable test cases, +// it also helps with flaky and slow network connection between the client and the server, +const scheduledPostMaxTimeGap = -5000 + +type ScheduledPost struct { + Draft + Id string `json:"id"` + ScheduledAt int64 `json:"scheduled_at"` + ProcessedAt int64 `json:"processed_at"` + ErrorCode string `json:"error_code"` +} + +func (s *ScheduledPost) IsValid(maxMessageSize int) *AppError { + draftAppErr := s.Draft.IsValid(maxMessageSize) + if draftAppErr != nil { + return draftAppErr + } + + return s.BaseIsValid() +} + +func (s *ScheduledPost) BaseIsValid() *AppError { + if draftAppErr := s.Draft.BaseIsValid(); draftAppErr != nil { + return draftAppErr + } + + if s.Id == "" { + return NewAppError("ScheduledPost.IsValid", "model.scheduled_post.is_valid.id.app_error", nil, "id="+s.Id, http.StatusBadRequest) + } + + if len(s.Message) == 0 && len(s.FileIds) == 0 { + return NewAppError("ScheduledPost.IsValid", "model.scheduled_post.is_valid.empty_post.app_error", nil, "id="+s.Id, http.StatusBadRequest) + } + + if (s.ScheduledAt - GetMillis()) < scheduledPostMaxTimeGap { + return NewAppError("ScheduledPost.IsValid", "model.scheduled_post.is_valid.scheduled_at.app_error", 
nil, "id="+s.Id, http.StatusBadRequest) + } + + if s.ProcessedAt < 0 { + return NewAppError("ScheduledPost.IsValid", "model.scheduled_post.is_valid.processed_at.app_error", nil, "id="+s.Id, http.StatusBadRequest) + } + + return nil +} + +func (s *ScheduledPost) PreSave() { + if s.Id == "" { + s.Id = NewId() + } + + s.ProcessedAt = 0 + s.ErrorCode = "" + + s.Draft.PreSave() +} + +func (s *ScheduledPost) PreUpdate() { + s.Draft.UpdateAt = GetMillis() + s.Draft.PreCommit() +} + +// ToPost converts a scheduled post toa regular, mattermost post object. +func (s *ScheduledPost) ToPost() (*Post, error) { + post := &Post{ + UserId: s.UserId, + ChannelId: s.ChannelId, + Message: s.Message, + FileIds: s.FileIds, + RootId: s.RootId, + Metadata: s.Metadata, + } + + for key, value := range s.GetProps() { + post.AddProp(key, value) + } + + if len(s.Priority) > 0 { + priority, ok := s.Priority["priority"].(string) + if !ok { + return nil, fmt.Errorf(`ScheduledPost.ToPost: priority is not a string. ScheduledPost.Priority: %v`, s.Priority) + } + + requestedAck, ok := s.Priority["requested_ack"].(bool) + if !ok { + return nil, fmt.Errorf(`ScheduledPost.ToPost: requested_ack is not a bool. ScheduledPost.Priority: %v`, s.Priority) + } + + persistentNotifications, ok := s.Priority["persistent_notifications"].(bool) + if !ok { + return nil, fmt.Errorf(`ScheduledPost.ToPost: persistent_notifications is not a bool. 
ScheduledPost.Priority: %v`, s.Priority) + } + + if post.Metadata == nil { + post.Metadata = &PostMetadata{} + } + + post.Metadata.Priority = &PostPriority{ + Priority: NewPointer(priority), + RequestedAck: NewPointer(requestedAck), + PersistentNotifications: NewPointer(persistentNotifications), + } + } + + return post, nil +} + +func (s *ScheduledPost) Auditable() map[string]any { + var metaData map[string]any + if s.Metadata != nil { + metaData = s.Metadata.Auditable() + } + + return map[string]any{ + "id": s.Id, + "create_at": s.CreateAt, + "update_at": s.UpdateAt, + "user_id": s.UserId, + "channel_id": s.ChannelId, + "root_id": s.RootId, + "props": s.GetProps(), + "file_ids": s.FileIds, + "metadata": metaData, + } +} + +func (s *ScheduledPost) RestoreNonUpdatableFields(originalScheduledPost *ScheduledPost) { + s.Id = originalScheduledPost.Id + s.CreateAt = originalScheduledPost.CreateAt + s.UserId = originalScheduledPost.UserId + s.ChannelId = originalScheduledPost.ChannelId + s.RootId = originalScheduledPost.RootId +} + +func (s *ScheduledPost) SanitizeInput() { + s.CreateAt = 0 + + if s.Metadata != nil { + s.Metadata.Embeds = nil + } +} + +func (s *ScheduledPost) GetPriority() *PostPriority { + if s.Metadata == nil { + return nil + } + return s.Metadata.Priority +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/scheduled_task.go b/vendor/github.com/mattermost/mattermost/server/public/model/scheduled_task.go new file mode 100644 index 00000000..cf20db63 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/scheduled_task.go @@ -0,0 +1,100 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "time" +) + +type TaskFunc func() + +type ScheduledTask struct { + Name string `json:"name"` + Interval time.Duration `json:"interval"` + Recurring bool `json:"recurring"` + function func() + cancel chan struct{} + cancelled chan struct{} + fromNextIntervalTime bool +} + +func CreateTask(name string, function TaskFunc, timeToExecution time.Duration) *ScheduledTask { + return createTask(name, function, timeToExecution, false, false) +} + +func CreateRecurringTask(name string, function TaskFunc, interval time.Duration) *ScheduledTask { + return createTask(name, function, interval, true, false) +} + +func CreateRecurringTaskFromNextIntervalTime(name string, function TaskFunc, interval time.Duration) *ScheduledTask { + return createTask(name, function, interval, true, true) +} + +func createTask(name string, function TaskFunc, interval time.Duration, recurring bool, fromNextIntervalTime bool) *ScheduledTask { + task := &ScheduledTask{ + Name: name, + Interval: interval, + Recurring: recurring, + function: function, + cancel: make(chan struct{}), + cancelled: make(chan struct{}), + fromNextIntervalTime: fromNextIntervalTime, + } + + go func() { + defer close(task.cancelled) + + var firstTick <-chan time.Time + var ticker *time.Ticker + + if task.fromNextIntervalTime { + currTime := time.Now() + first := currTime.Truncate(interval) + if first.Before(currTime) { + first = first.Add(interval) + } + firstTick = time.After(time.Until(first)) + ticker = &time.Ticker{C: nil} + } else { + firstTick = nil + ticker = time.NewTicker(interval) + } + defer func() { + ticker.Stop() + }() + + for { + select { + case <-firstTick: + ticker = time.NewTicker(interval) + function() + case <-ticker.C: + function() + case <-task.cancel: + return + } + + if !task.Recurring { + break + } + } + }() + + return task +} + +func (task *ScheduledTask) Cancel() { + close(task.cancel) + <-task.cancelled +} + +func (task *ScheduledTask) String() string { + return 
fmt.Sprintf( + "%s\nInterval: %s\nRecurring: %t\n", + task.Name, + task.Interval.String(), + task.Recurring, + ) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/scheme.go b/vendor/github.com/mattermost/mattermost/server/public/model/scheme.go new file mode 100644 index 00000000..60382199 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/scheme.go @@ -0,0 +1,351 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "regexp" + + "github.com/mattermost/mattermost/server/public/utils/timeutils" +) + +const ( + SchemeDisplayNameMaxLength = 128 + SchemeNameMaxLength = 64 + SchemeDescriptionMaxLength = 1024 + SchemeScopeTeam = "team" + SchemeScopeChannel = "channel" + SchemeScopePlaybook = "playbook" + SchemeScopeRun = "run" +) + +type Scheme struct { + Id string `json:"id"` + Name string `json:"name"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + Scope string `json:"scope"` + DefaultTeamAdminRole string `json:"default_team_admin_role"` + DefaultTeamUserRole string `json:"default_team_user_role"` + DefaultChannelAdminRole string `json:"default_channel_admin_role"` + DefaultChannelUserRole string `json:"default_channel_user_role"` + DefaultTeamGuestRole string `json:"default_team_guest_role"` + DefaultChannelGuestRole string `json:"default_channel_guest_role"` + DefaultPlaybookAdminRole string `json:"default_playbook_admin_role"` + DefaultPlaybookMemberRole string `json:"default_playbook_member_role"` + DefaultRunAdminRole string `json:"default_run_admin_role"` + DefaultRunMemberRole string `json:"default_run_member_role"` +} + +func (scheme *Scheme) Auditable() map[string]any { + return map[string]any{ + "id": scheme.Id, + "name": scheme.Name, + "display_name": 
scheme.DisplayName, + "description": scheme.Description, + "create_at": scheme.CreateAt, + "update_at": scheme.UpdateAt, + "delete_at": scheme.DeleteAt, + "scope": scheme.Scope, + "default_team_admin_role": scheme.DefaultTeamAdminRole, + "default_team_user_role": scheme.DefaultTeamUserRole, + "default_channel_admin_role": scheme.DefaultChannelAdminRole, + "default_channel_user_role": scheme.DefaultChannelUserRole, + "default_team_guest_role": scheme.DefaultTeamGuestRole, + "default_channel_guest_role": scheme.DefaultChannelGuestRole, + "default_playbook_admin_role": scheme.DefaultPlaybookAdminRole, + "default_playbook_member_role": scheme.DefaultPlaybookMemberRole, + "default_run_admin_role": scheme.DefaultRunAdminRole, + "default_run_member_role": scheme.DefaultRunMemberRole, + } +} + +func (scheme *Scheme) Sanitize() { + scheme.Name = FakeSetting + scheme.DisplayName = FakeSetting + scheme.Description = FakeSetting +} + +func (scheme *Scheme) MarshalYAML() (any, error) { + return struct { + Id string `yaml:"id"` + Name string `yaml:"name"` + DisplayName string `yaml:"display_name"` + Description string `yaml:"description"` + CreateAt string `yaml:"create_at"` + UpdateAt string `yaml:"update_at"` + DeleteAt string `yaml:"delete_at"` + Scope string `yaml:"scope"` + DefaultTeamAdminRole string `yaml:"default_team_admin_role"` + DefaultTeamUserRole string `yaml:"default_team_user_role"` + DefaultChannelAdminRole string `yaml:"default_channel_admin_role"` + DefaultChannelUserRole string `yaml:"default_channel_user_role"` + DefaultTeamGuestRole string `yaml:"default_team_guest_role"` + DefaultChannelGuestRole string `yaml:"default_channel_guest_role"` + DefaultPlaybookAdminRole string `yaml:"default_playbook_admin_role"` + DefaultPlaybookMemberRole string `yaml:"default_playbook_member_role"` + DefaultRunAdminRole string `yaml:"default_run_admin_role"` + DefaultRunMemberRole string `yaml:"default_run_member_role"` + }{ + Id: scheme.Id, + Name: scheme.Name, + 
DisplayName: scheme.DisplayName, + Description: scheme.Description, + CreateAt: timeutils.FormatMillis(scheme.CreateAt), + UpdateAt: timeutils.FormatMillis(scheme.UpdateAt), + DeleteAt: timeutils.FormatMillis(scheme.DeleteAt), + Scope: scheme.Scope, + DefaultTeamAdminRole: scheme.DefaultTeamAdminRole, + DefaultTeamUserRole: scheme.DefaultTeamUserRole, + DefaultChannelAdminRole: scheme.DefaultChannelAdminRole, + DefaultChannelUserRole: scheme.DefaultChannelUserRole, + DefaultTeamGuestRole: scheme.DefaultTeamGuestRole, + DefaultChannelGuestRole: scheme.DefaultChannelGuestRole, + DefaultPlaybookAdminRole: scheme.DefaultPlaybookAdminRole, + DefaultPlaybookMemberRole: scheme.DefaultPlaybookMemberRole, + DefaultRunAdminRole: scheme.DefaultRunAdminRole, + DefaultRunMemberRole: scheme.DefaultRunMemberRole, + }, nil +} + +func (scheme *Scheme) UnmarshalYAML(unmarshal func(any) error) error { + out := struct { + Id string `yaml:"id"` + Name string `yaml:"name"` + DisplayName string `yaml:"display_name"` + Description string `yaml:"description"` + CreateAt string `yaml:"create_at"` + UpdateAt string `yaml:"update_at"` + DeleteAt string `yaml:"delete_at"` + Scope string `yaml:"scope"` + DefaultTeamAdminRole string `yaml:"default_team_admin_role"` + DefaultTeamUserRole string `yaml:"default_team_user_role"` + DefaultChannelAdminRole string `yaml:"default_channel_admin_role"` + DefaultChannelUserRole string `yaml:"default_channel_user_role"` + DefaultTeamGuestRole string `yaml:"default_team_guest_role"` + DefaultChannelGuestRole string `yaml:"default_channel_guest_role"` + DefaultPlaybookAdminRole string `yaml:"default_playbook_admin_role"` + DefaultPlaybookMemberRole string `yaml:"default_playbook_member_role"` + DefaultRunAdminRole string `yaml:"default_run_admin_role"` + DefaultRunMemberRole string `yaml:"default_run_member_role"` + }{} + + err := unmarshal(&out) + if err != nil { + return err + } + + createAt, err := timeutils.ParseFormatedMillis(out.CreateAt) + if err != 
nil { + return err + } + updateAt, err := timeutils.ParseFormatedMillis(out.UpdateAt) + if err != nil { + return err + } + deleteAt, err := timeutils.ParseFormatedMillis(out.DeleteAt) + if err != nil { + return err + } + + *scheme = Scheme{ + Id: out.Id, + Name: out.Name, + DisplayName: out.DisplayName, + Description: out.Description, + CreateAt: createAt, + UpdateAt: updateAt, + DeleteAt: deleteAt, + Scope: out.Scope, + DefaultTeamAdminRole: out.DefaultTeamAdminRole, + DefaultTeamUserRole: out.DefaultTeamUserRole, + DefaultChannelAdminRole: out.DefaultChannelAdminRole, + DefaultChannelUserRole: out.DefaultChannelUserRole, + DefaultTeamGuestRole: out.DefaultTeamGuestRole, + DefaultChannelGuestRole: out.DefaultChannelGuestRole, + DefaultPlaybookAdminRole: out.DefaultPlaybookAdminRole, + DefaultPlaybookMemberRole: out.DefaultPlaybookMemberRole, + DefaultRunAdminRole: out.DefaultRunAdminRole, + DefaultRunMemberRole: out.DefaultRunMemberRole, + } + return nil +} + +type SchemePatch struct { + Name *string `json:"name"` + DisplayName *string `json:"display_name"` + Description *string `json:"description"` +} + +func (scheme *SchemePatch) Auditable() map[string]any { + return map[string]any{ + "name": scheme.Name, + "display_name": scheme.DisplayName, + "description": scheme.Description, + } +} + +type SchemeIDPatch struct { + SchemeID *string `json:"scheme_id"` +} + +func (p *SchemeIDPatch) Auditable() map[string]any { + return map[string]any{ + "scheme_id": p.SchemeID, + } +} + +// SchemeConveyor is used for importing and exporting a Scheme and its associated Roles. 
+type SchemeConveyor struct { + Name string `json:"name"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + Scope string `json:"scope"` + TeamAdmin string `json:"default_team_admin_role"` + TeamUser string `json:"default_team_user_role"` + TeamGuest string `json:"default_team_guest_role"` + ChannelAdmin string `json:"default_channel_admin_role"` + ChannelUser string `json:"default_channel_user_role"` + ChannelGuest string `json:"default_channel_guest_role"` + PlaybookAdmin string `json:"default_playbook_admin_role"` + PlaybookMember string `json:"default_playbook_member_role"` + RunAdmin string `json:"default_run_admin_role"` + RunMember string `json:"default_run_member_role"` + Roles []*Role `json:"roles"` +} + +func (sc *SchemeConveyor) Scheme() *Scheme { + return &Scheme{ + DisplayName: sc.DisplayName, + Name: sc.Name, + Description: sc.Description, + Scope: sc.Scope, + DefaultTeamAdminRole: sc.TeamAdmin, + DefaultTeamUserRole: sc.TeamUser, + DefaultTeamGuestRole: sc.TeamGuest, + DefaultChannelAdminRole: sc.ChannelAdmin, + DefaultChannelUserRole: sc.ChannelUser, + DefaultChannelGuestRole: sc.ChannelGuest, + DefaultPlaybookAdminRole: sc.PlaybookAdmin, + DefaultPlaybookMemberRole: sc.PlaybookMember, + DefaultRunAdminRole: sc.RunAdmin, + DefaultRunMemberRole: sc.RunMember, + } +} + +type SchemeRoles struct { + SchemeAdmin bool `json:"scheme_admin"` + SchemeUser bool `json:"scheme_user"` + SchemeGuest bool `json:"scheme_guest"` +} + +func (s *SchemeRoles) Auditable() map[string]any { + return map[string]any{} +} + +func (scheme *Scheme) IsValid() bool { + if !IsValidId(scheme.Id) { + return false + } + + return scheme.IsValidForCreate() +} + +func (scheme *Scheme) IsValidForCreate() bool { + if scheme.DisplayName == "" || len(scheme.DisplayName) > SchemeDisplayNameMaxLength { + return false + } + + if !IsValidSchemeName(scheme.Name) { + return false + } + + if len(scheme.Description) > SchemeDescriptionMaxLength { + return false 
+ } + + switch scheme.Scope { + case SchemeScopeTeam, SchemeScopeChannel, SchemeScopePlaybook, SchemeScopeRun: + default: + return false + } + + if !IsValidRoleName(scheme.DefaultChannelAdminRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultChannelUserRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultChannelGuestRole) { + return false + } + + if scheme.Scope == SchemeScopeTeam { + if !IsValidRoleName(scheme.DefaultTeamAdminRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultTeamUserRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultTeamGuestRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultPlaybookAdminRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultPlaybookMemberRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultRunAdminRole) { + return false + } + + if !IsValidRoleName(scheme.DefaultRunMemberRole) { + return false + } + } + + if scheme.Scope == SchemeScopeChannel { + if scheme.DefaultTeamAdminRole != "" { + return false + } + + if scheme.DefaultTeamUserRole != "" { + return false + } + + if scheme.DefaultTeamGuestRole != "" { + return false + } + } + + return true +} + +func (scheme *Scheme) Patch(patch *SchemePatch) { + if patch.DisplayName != nil { + scheme.DisplayName = *patch.DisplayName + } + if patch.Name != nil { + scheme.Name = *patch.Name + } + if patch.Description != nil { + scheme.Description = *patch.Description + } +} + +func IsValidSchemeName(name string) bool { + re := regexp.MustCompile(fmt.Sprintf("^[a-z0-9_]{2,%d}$", SchemeNameMaxLength)) + return re.MatchString(name) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/search_params.go b/vendor/github.com/mattermost/mattermost/server/public/model/search_params.go new file mode 100644 index 00000000..1b8a254e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/search_params.go @@ -0,0 +1,398 @@ +// Copyright (c) 2015-present Mattermost, Inc. 
All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" + "regexp" + "strings" + "time" +) + +var searchTermPuncStart = regexp.MustCompile(`^[^\pL\d\s#"]+`) +var searchTermPuncEnd = regexp.MustCompile(`[^\pL\p{M}\d\s*"]+$`) + +type SearchParams struct { + Terms string `json:"terms,omitempty"` + ExcludedTerms string `json:"excluded_terms,omitempty"` + IsHashtag bool `json:"ishashtag,omitempty"` + InChannels []string `json:"in_channels,omitempty"` + ExcludedChannels []string `json:"excluded_channels,omitempty"` + FromUsers []string `json:"from_users,omitempty"` + ExcludedUsers []string `json:"excluded_users,omitempty"` + AfterDate string `json:"after_date,omitempty"` + ExcludedAfterDate string `json:"excluded_after_date,omitempty"` + BeforeDate string `json:"before_date,omitempty"` + ExcludedBeforeDate string `json:"excluded_before_date,omitempty"` + Extensions []string `json:"extensions,omitempty"` + ExcludedExtensions []string `json:"excluded_extensions,omitempty"` + OnDate string `json:"on_date,omitempty"` + ExcludedDate string `json:"excluded_date,omitempty"` + OrTerms bool `json:"or_terms,omitempty"` + IncludeDeletedChannels bool `json:"include_deleted_channels,omitempty"` + TimeZoneOffset int `json:"timezone_offset,omitempty"` + // True if this search doesn't originate from a "current user". 
+ SearchWithoutUserId bool `json:"search_without_user_id,omitempty"` + Modifier string `json:"modifier"` +} + +// Returns the epoch timestamp of the start of the day specified by SearchParams.AfterDate +func (p *SearchParams) GetAfterDateMillis() int64 { + date, err := time.Parse("2006-01-02", PadDateStringZeros(p.AfterDate)) + if err != nil { + date = time.Now() + } + + // travel forward 1 day + oneDay := time.Hour * 24 + afterDate := date.Add(oneDay) + return GetStartOfDayMillis(afterDate, p.TimeZoneOffset) +} + +// Returns the epoch timestamp of the start of the day specified by SearchParams.ExcludedAfterDate +func (p *SearchParams) GetExcludedAfterDateMillis() int64 { + date, err := time.Parse("2006-01-02", PadDateStringZeros(p.ExcludedAfterDate)) + if err != nil { + date = time.Now() + } + + // travel forward 1 day + oneDay := time.Hour * 24 + afterDate := date.Add(oneDay) + return GetStartOfDayMillis(afterDate, p.TimeZoneOffset) +} + +// Returns the epoch timestamp of the end of the day specified by SearchParams.BeforeDate +func (p *SearchParams) GetBeforeDateMillis() int64 { + date, err := time.Parse("2006-01-02", PadDateStringZeros(p.BeforeDate)) + if err != nil { + return 0 + } + + // travel back 1 day + oneDay := time.Hour * -24 + beforeDate := date.Add(oneDay) + return GetEndOfDayMillis(beforeDate, p.TimeZoneOffset) +} + +// Returns the epoch timestamp of the end of the day specified by SearchParams.ExcludedBeforeDate +func (p *SearchParams) GetExcludedBeforeDateMillis() int64 { + date, err := time.Parse("2006-01-02", PadDateStringZeros(p.ExcludedBeforeDate)) + if err != nil { + return 0 + } + + // travel back 1 day + oneDay := time.Hour * -24 + beforeDate := date.Add(oneDay) + return GetEndOfDayMillis(beforeDate, p.TimeZoneOffset) +} + +// Returns the epoch timestamps of the start and end of the day specified by SearchParams.OnDate +func (p *SearchParams) GetOnDateMillis() (int64, int64) { + date, err := time.Parse("2006-01-02", 
PadDateStringZeros(p.OnDate)) + if err != nil { + return 0, 0 + } + + return GetStartOfDayMillis(date, p.TimeZoneOffset), GetEndOfDayMillis(date, p.TimeZoneOffset) +} + +// Returns the epoch timestamps of the start and end of the day specified by SearchParams.ExcludedDate +func (p *SearchParams) GetExcludedDateMillis() (int64, int64) { + date, err := time.Parse("2006-01-02", PadDateStringZeros(p.ExcludedDate)) + if err != nil { + return 0, 0 + } + + return GetStartOfDayMillis(date, p.TimeZoneOffset), GetEndOfDayMillis(date, p.TimeZoneOffset) +} + +var searchFlags = [...]string{"from", "channel", "in", "before", "after", "on", "ext"} + +type flag struct { + name string + value string + exclude bool +} + +type searchWord struct { + value string + exclude bool +} + +func splitWords(text string) []string { + words := []string{} + + foundQuote := false + location := 0 + for i, char := range text { + if char == '"' { + if foundQuote { + // Grab the quoted section + word := text[location : i+1] + words = append(words, word) + foundQuote = false + location = i + 1 + } else { + nextStart := i + if i > 0 && text[i-1] == '-' { + nextStart = i - 1 + } + words = append(words, strings.Fields(text[location:nextStart])...) + foundQuote = true + location = nextStart + } + } + } + + words = append(words, strings.Fields(text[location:])...) 
+ + return words +} + +func parseSearchFlags(input []string) ([]searchWord, []flag) { + words := []searchWord{} + flags := []flag{} + + skipNextWord := false + for i, word := range input { + if skipNextWord { + skipNextWord = false + continue + } + + isFlag := false + + if colon := strings.Index(word, ":"); colon != -1 { + var flagName string + var exclude bool + if strings.HasPrefix(word, "-") { + flagName = word[1:colon] + exclude = true + } else { + flagName = word[:colon] + exclude = false + } + + value := word[colon+1:] + + for _, searchFlag := range searchFlags { + // check for case insensitive equality + if strings.EqualFold(flagName, searchFlag) { + if value != "" { + flags = append(flags, flag{ + searchFlag, + value, + exclude, + }) + isFlag = true + } else if i < len(input)-1 { + flags = append(flags, flag{ + searchFlag, + input[i+1], + exclude, + }) + skipNextWord = true + isFlag = true + } + + if isFlag { + break + } + } + } + } + + if !isFlag { + exclude := false + if strings.HasPrefix(word, "-") { + exclude = true + } + // trim off surrounding punctuation (note that we leave trailing asterisks to allow wildcards) + word = searchTermPuncStart.ReplaceAllString(word, "") + word = searchTermPuncEnd.ReplaceAllString(word, "") + + // and remove extra pound #s + word = hashtagStart.ReplaceAllString(word, "#") + + if word != "" { + words = append(words, searchWord{ + word, + exclude, + }) + } + } + } + + return words, flags +} + +func ParseSearchParams(text string, timeZoneOffset int) []*SearchParams { + words, flags := parseSearchFlags(splitWords(text)) + + hashtagTermList := []string{} + excludedHashtagTermList := []string{} + plainTermList := []string{} + excludedPlainTermList := []string{} + + for _, word := range words { + if validHashtag.MatchString(word.value) { + if word.exclude { + excludedHashtagTermList = append(excludedHashtagTermList, word.value) + } else { + hashtagTermList = append(hashtagTermList, word.value) + } + } else { + if word.exclude { 
+ excludedPlainTermList = append(excludedPlainTermList, word.value) + } else { + plainTermList = append(plainTermList, word.value) + } + } + } + + hashtagTerms := strings.Join(hashtagTermList, " ") + excludedHashtagTerms := strings.Join(excludedHashtagTermList, " ") + plainTerms := strings.Join(plainTermList, " ") + excludedPlainTerms := strings.Join(excludedPlainTermList, " ") + + inChannels := []string{} + excludedChannels := []string{} + fromUsers := []string{} + excludedUsers := []string{} + afterDate := "" + excludedAfterDate := "" + beforeDate := "" + excludedBeforeDate := "" + onDate := "" + excludedDate := "" + excludedExtensions := []string{} + extensions := []string{} + + for _, flag := range flags { + if flag.name == "in" || flag.name == "channel" { + if flag.exclude { + excludedChannels = append(excludedChannels, flag.value) + } else { + inChannels = append(inChannels, flag.value) + } + } else if flag.name == "from" { + if flag.exclude { + excludedUsers = append(excludedUsers, flag.value) + } else { + fromUsers = append(fromUsers, flag.value) + } + } else if flag.name == "after" { + if flag.exclude { + excludedAfterDate = flag.value + } else { + afterDate = flag.value + } + } else if flag.name == "before" { + if flag.exclude { + excludedBeforeDate = flag.value + } else { + beforeDate = flag.value + } + } else if flag.name == "on" { + if flag.exclude { + excludedDate = flag.value + } else { + onDate = flag.value + } + } else if flag.name == "ext" { + if flag.exclude { + excludedExtensions = append(excludedExtensions, flag.value) + } else { + extensions = append(extensions, flag.value) + } + } + } + + paramsList := []*SearchParams{} + + if plainTerms != "" || excludedPlainTerms != "" { + paramsList = append(paramsList, &SearchParams{ + Terms: plainTerms, + ExcludedTerms: excludedPlainTerms, + IsHashtag: false, + InChannels: inChannels, + ExcludedChannels: excludedChannels, + FromUsers: fromUsers, + ExcludedUsers: excludedUsers, + AfterDate: afterDate, + 
ExcludedAfterDate: excludedAfterDate, + BeforeDate: beforeDate, + ExcludedBeforeDate: excludedBeforeDate, + Extensions: extensions, + ExcludedExtensions: excludedExtensions, + OnDate: onDate, + ExcludedDate: excludedDate, + TimeZoneOffset: timeZoneOffset, + }) + } + + if hashtagTerms != "" || excludedHashtagTerms != "" { + paramsList = append(paramsList, &SearchParams{ + Terms: hashtagTerms, + ExcludedTerms: excludedHashtagTerms, + IsHashtag: true, + InChannels: inChannels, + ExcludedChannels: excludedChannels, + FromUsers: fromUsers, + ExcludedUsers: excludedUsers, + AfterDate: afterDate, + ExcludedAfterDate: excludedAfterDate, + BeforeDate: beforeDate, + ExcludedBeforeDate: excludedBeforeDate, + Extensions: extensions, + ExcludedExtensions: excludedExtensions, + OnDate: onDate, + ExcludedDate: excludedDate, + TimeZoneOffset: timeZoneOffset, + }) + } + + // special case for when no terms are specified but we still have a filter + if plainTerms == "" && hashtagTerms == "" && + excludedPlainTerms == "" && excludedHashtagTerms == "" && + (len(inChannels) != 0 || len(fromUsers) != 0 || + len(excludedChannels) != 0 || len(excludedUsers) != 0 || + len(extensions) != 0 || len(excludedExtensions) != 0 || + afterDate != "" || excludedAfterDate != "" || + beforeDate != "" || excludedBeforeDate != "" || + onDate != "" || excludedDate != "") { + paramsList = append(paramsList, &SearchParams{ + Terms: "", + ExcludedTerms: "", + IsHashtag: false, + InChannels: inChannels, + ExcludedChannels: excludedChannels, + FromUsers: fromUsers, + ExcludedUsers: excludedUsers, + AfterDate: afterDate, + ExcludedAfterDate: excludedAfterDate, + BeforeDate: beforeDate, + ExcludedBeforeDate: excludedBeforeDate, + Extensions: extensions, + ExcludedExtensions: excludedExtensions, + OnDate: onDate, + ExcludedDate: excludedDate, + TimeZoneOffset: timeZoneOffset, + }) + } + + return paramsList +} + +func IsSearchParamsListValid(paramsList []*SearchParams) *AppError { + // All SearchParams should have 
same IncludeDeletedChannels value. + for _, params := range paramsList { + if params.IncludeDeletedChannels != paramsList[0].IncludeDeletedChannels { + return NewAppError("IsSearchParamsListValid", "model.search_params_list.is_valid.include_deleted_channels.app_error", nil, "", http.StatusInternalServerError) + } + } + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/security_bulletin.go b/vendor/github.com/mattermost/mattermost/server/public/model/security_bulletin.go new file mode 100644 index 00000000..fa5662cf --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/security_bulletin.go @@ -0,0 +1,11 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type SecurityBulletin struct { + Id string `json:"id"` + AppliesToVersion string `json:"applies_to_version"` +} + +type SecurityBulletins []SecurityBulletin diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/service_environment.go b/vendor/github.com/mattermost/mattermost/server/public/model/service_environment.go new file mode 100644 index 00000000..e531e35a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/service_environment.go @@ -0,0 +1,45 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "os" + "strings" +) + +const ( + // ServiceEnvironmentProduction represents the production self-managed or cloud + // environments. This can be configured explicitly with MM_SERVICEENVIRONMENT explicitly + // set to "production", but is also the default for any production builds. + ServiceEnvironmentProduction = "production" + // ServiceEnvironmentTest represents testing environments in which MM_SERVICEENVIRONMENT + // is set explicitly to "test". 
+ ServiceEnvironmentTest = "test" + // ServiceEnvironmentDev represents development environments. This can be configured + // explicitly with MM_SERVICEENVIRONMENT set to "dev", but is also the default for any + // non-production builds. + ServiceEnvironmentDev = "dev" +) + +// GetServiceEnvironment returns the currently configured external service environment, +// deciding which public key is used to validate enterprise licenses, which telemetry keys are +// active, and which Stripe keys are in use. +// +// To configure an environment other than default, set MM_SERVICEENVIRONMENT before +// starting the application. Production builds default to ServiceEnvironmentProduction, and +// non-production builds default to ServiceEnvironmentDev. +// +// Note that this configuration is explicitly not part of the model.Config data structure, as it +// should never be persisted to the config store nor accidentally configured in any other way than +// the MM_SERVICEENVIRONMENT variable. +func GetServiceEnvironment() string { + externalServiceEnvironment := strings.TrimSpace(strings.ToLower(os.Getenv("MM_SERVICEENVIRONMENT"))) + + switch externalServiceEnvironment { + case ServiceEnvironmentProduction, ServiceEnvironmentTest, ServiceEnvironmentDev: + return externalServiceEnvironment + } + + return getDefaultServiceEnvironment() +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/service_environment_dev_default.go b/vendor/github.com/mattermost/mattermost/server/public/model/service_environment_dev_default.go new file mode 100644 index 00000000..8b665c36 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/service_environment_dev_default.go @@ -0,0 +1,10 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +//go:build !production + +package model + +func getDefaultServiceEnvironment() string { + return ServiceEnvironmentDev +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/service_environment_production_default.go b/vendor/github.com/mattermost/mattermost/server/public/model/service_environment_production_default.go new file mode 100644 index 00000000..4d027bf5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/service_environment_production_default.go @@ -0,0 +1,10 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +//go:build production + +package model + +func getDefaultServiceEnvironment() string { + return ServiceEnvironmentProduction +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/session.go b/vendor/github.com/mattermost/mattermost/server/public/model/session.go new file mode 100644 index 00000000..fa067253 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/session.go @@ -0,0 +1,293 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" + "strconv" + "strings" + + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +const ( + SessionCookieToken = "MMAUTHTOKEN" + SessionCookieUser = "MMUSERID" + SessionCookieCsrf = "MMCSRF" + SessionCookieCloudUrl = "MMCLOUDURL" + SessionCacheSize = 35000 + SessionPropPlatform = "platform" + SessionPropOs = "os" + SessionPropBrowser = "browser" + SessionPropType = "type" + SessionPropUserAccessTokenId = "user_access_token_id" + SessionPropIsBot = "is_bot" + SessionPropIsBotValue = "true" + SessionPropOAuthAppID = "oauth_app_id" + SessionPropMattermostAppID = "mattermost_app_id" + SessionPropLastRemovedDeviceId = "last_removed_device_id" + SessionPropDeviceNotificationDisabled = "device_notification_disabled" + SessionPropMobileVersion = "mobile_version" + SessionTypeUserAccessToken = "UserAccessToken" + SessionTypeCloudKey = "CloudKey" + SessionTypeRemoteclusterToken = "RemoteClusterToken" + SessionPropIsGuest = "is_guest" + SessionActivityTimeout = 1000 * 60 * 5 // 5 minutes + SessionUserAccessTokenExpiryHours = 100 * 365 * 24 // 100 years +) + +//msgp:tuple StringMap +type StringMap map[string]string + +type MobileSessionMetadata struct { + Version string + Platform string + Count float64 + NotificationDisabled string +} + +// Session contains the user session details. +// This struct's serializer methods are auto-generated. If a new field is added/removed, +// please run make gen-serialized. 
+// +//msgp:tuple Session +type Session struct { + Id string `json:"id"` + Token string `json:"token"` + CreateAt int64 `json:"create_at"` + ExpiresAt int64 `json:"expires_at"` + LastActivityAt int64 `json:"last_activity_at"` + UserId string `json:"user_id"` + DeviceId string `json:"device_id"` + Roles string `json:"roles"` + IsOAuth bool `json:"is_oauth"` + ExpiredNotify bool `json:"expired_notify"` + Props StringMap `json:"props"` + TeamMembers []*TeamMember `json:"team_members" db:"-"` + Local bool `json:"local" db:"-"` +} + +func (s *Session) Auditable() map[string]any { + return map[string]any{ + "id": s.Id, + "create_at": s.CreateAt, + "expires_at": s.ExpiresAt, + "last_activity_at": s.LastActivityAt, + "user_id": s.UserId, + "device_id": s.DeviceId, + "roles": s.Roles, + "is_oauth": s.IsOAuth, + "expired_notify": s.ExpiredNotify, + "local": s.Local, + } +} + +// IsUnrestricted returns true if the session is unrestricted, which should grant it +// with all permissions. This is used for local mode sessions +func (s *Session) IsUnrestricted() bool { + return s.Local +} + +func (s *Session) DeepCopy() *Session { + copySession := *s + + if s.Props != nil { + copySession.Props = CopyStringMap(s.Props) + } + + if s.TeamMembers != nil { + copySession.TeamMembers = make([]*TeamMember, len(s.TeamMembers)) + for index, tm := range s.TeamMembers { + copySession.TeamMembers[index] = new(TeamMember) + *copySession.TeamMembers[index] = *tm + } + } + + return ©Session +} + +func (s *Session) IsValid() *AppError { + if !IsValidId(s.Id) { + return NewAppError("Session.IsValid", "model.session.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(s.UserId) { + return NewAppError("Session.IsValid", "model.session.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if s.CreateAt == 0 { + return NewAppError("Session.IsValid", "model.session.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + + if len(s.Roles) > 
UserRolesMaxLength { + return NewAppError("Session.IsValid", "model.session.is_valid.roles_limit.app_error", + map[string]any{"Limit": UserRolesMaxLength}, "session_id="+s.Id, http.StatusBadRequest) + } + + return nil +} + +func (s *Session) PreSave() { + if s.Id == "" { + s.Id = NewId() + } + + if s.Token == "" { + s.Token = NewId() + } + + s.CreateAt = GetMillis() + s.LastActivityAt = s.CreateAt + + if s.Props == nil { + s.Props = make(map[string]string) + } +} + +func (s *Session) Sanitize() { + s.Token = "" +} + +func (s *Session) IsExpired() bool { + if s.ExpiresAt <= 0 { + return false + } + + if GetMillis() > s.ExpiresAt { + return true + } + + return false +} + +func (s *Session) AddProp(key string, value string) { + if s.Props == nil { + s.Props = make(map[string]string) + } + + s.Props[key] = value +} + +func (s *Session) GetTeamByTeamId(teamId string) *TeamMember { + for _, tm := range s.TeamMembers { + if tm.TeamId == teamId { + return tm + } + } + + return nil +} + +func (s *Session) IsMobileApp() bool { + return s.DeviceId != "" || s.IsMobile() +} + +func (s *Session) IsMobile() bool { + val, ok := s.Props[UserAuthServiceIsMobile] + if !ok { + return false + } + isMobile, err := strconv.ParseBool(val) + if err != nil { + mlog.Debug("Error parsing boolean property from Session", mlog.Err(err)) + return false + } + return isMobile +} + +func (s *Session) IsSaml() bool { + val, ok := s.Props[UserAuthServiceIsSaml] + if !ok { + return false + } + isSaml, err := strconv.ParseBool(val) + if err != nil { + mlog.Debug("Error parsing boolean property from Session", mlog.Err(err)) + return false + } + return isSaml +} + +func (s *Session) IsOAuthUser() bool { + val, ok := s.Props[UserAuthServiceIsOAuth] + if !ok { + return false + } + isOAuthUser, err := strconv.ParseBool(val) + if err != nil { + mlog.Debug("Error parsing boolean property from Session", mlog.Err(err)) + return false + } + return isOAuthUser +} + +func (s *Session) IsBotUser() bool { + val, ok 
:= s.Props[SessionPropIsBot] + if !ok { + return false + } + if val == SessionPropIsBotValue { + return true + } + return false +} + +func (s *Session) IsUserAccessToken() bool { + val, ok := s.Props[SessionPropType] + if !ok { + return false + } + if val == SessionTypeUserAccessToken { + return true + } + return false +} + +// Returns true when session is authenticated as a bot, by personal access token, or is an OAuth app. +// Does not indicate other forms of integrations e.g. webhooks, slash commands, etc. +func (s *Session) IsIntegration() bool { + return s.IsBotUser() || s.IsUserAccessToken() || s.IsOAuth +} + +func (s *Session) IsSSOLogin() bool { + return s.IsOAuthUser() || s.IsSaml() +} + +func (s *Session) IsGuest() bool { + val, ok := s.Props[SessionPropIsGuest] + if !ok { + return false + } + return val == "true" +} + +func (s *Session) GetUserRoles() []string { + return strings.Fields(s.Roles) +} + +func (s *Session) GenerateCSRF() string { + token := NewId() + s.AddProp("csrf", token) + return token +} + +func (s *Session) GetCSRF() string { + if s.Props == nil { + return "" + } + + return s.Props["csrf"] +} + +func (s *Session) CreateAt_() float64 { + return float64(s.CreateAt) +} + +func (s *Session) ExpiresAt_() float64 { + return float64(s.ExpiresAt) +} + +func (s *Session) LastActivityAt_() float64 { + return float64(s.LastActivityAt) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/session_serial_gen.go b/vendor/github.com/mattermost/mattermost/server/public/model/session_serial_gen.go new file mode 100644 index 00000000..f962cc03 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/session_serial_gen.go @@ -0,0 +1,718 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. 
+ +import ( + "github.com/tinylib/msgp/msgp" +) + +// DecodeMsg implements msgp.Decodable +func (z *MobileSessionMetadata) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, err = dc.ReadMapHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "Version": + z.Version, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Version") + return + } + case "Platform": + z.Platform, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Platform") + return + } + case "Count": + z.Count, err = dc.ReadFloat64() + if err != nil { + err = msgp.WrapError(err, "Count") + return + } + case "NotificationDisabled": + z.NotificationDisabled, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "NotificationDisabled") + return + } + default: + err = dc.Skip() + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z *MobileSessionMetadata) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 4 + // write "Version" + err = en.Append(0x84, 0xa7, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e) + if err != nil { + return + } + err = en.WriteString(z.Version) + if err != nil { + err = msgp.WrapError(err, "Version") + return + } + // write "Platform" + err = en.Append(0xa8, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d) + if err != nil { + return + } + err = en.WriteString(z.Platform) + if err != nil { + err = msgp.WrapError(err, "Platform") + return + } + // write "Count" + err = en.Append(0xa5, 0x43, 0x6f, 0x75, 0x6e, 0x74) + if err != nil { + return + } + err = en.WriteFloat64(z.Count) + if err != nil { + err = msgp.WrapError(err, "Count") + return + } + // write "NotificationDisabled" + err = en.Append(0xb4, 0x4e, 0x6f, 0x74, 0x69, 0x66, 
0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64) + if err != nil { + return + } + err = en.WriteString(z.NotificationDisabled) + if err != nil { + err = msgp.WrapError(err, "NotificationDisabled") + return + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z *MobileSessionMetadata) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 4 + // string "Version" + o = append(o, 0x84, 0xa7, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e) + o = msgp.AppendString(o, z.Version) + // string "Platform" + o = append(o, 0xa8, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d) + o = msgp.AppendString(o, z.Platform) + // string "Count" + o = append(o, 0xa5, 0x43, 0x6f, 0x75, 0x6e, 0x74) + o = msgp.AppendFloat64(o, z.Count) + // string "NotificationDisabled" + o = append(o, 0xb4, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64) + o = msgp.AppendString(o, z.NotificationDisabled) + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *MobileSessionMetadata) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "Version": + z.Version, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Version") + return + } + case "Platform": + z.Platform, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Platform") + return + } + case "Count": + z.Count, bts, err = msgp.ReadFloat64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "Count") + return + } + case "NotificationDisabled": + z.NotificationDisabled, bts, err = 
msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "NotificationDisabled") + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *MobileSessionMetadata) Msgsize() (s int) { + s = 1 + 8 + msgp.StringPrefixSize + len(z.Version) + 9 + msgp.StringPrefixSize + len(z.Platform) + 6 + msgp.Float64Size + 21 + msgp.StringPrefixSize + len(z.NotificationDisabled) + return +} + +// DecodeMsg implements msgp.Decodable +func (z *Session) DecodeMsg(dc *msgp.Reader) (err error) { + var zb0001 uint32 + zb0001, err = dc.ReadArrayHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + if zb0001 != 13 { + err = msgp.ArrayError{Wanted: 13, Got: zb0001} + return + } + z.Id, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Id") + return + } + z.Token, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Token") + return + } + z.CreateAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + z.ExpiresAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "ExpiresAt") + return + } + z.LastActivityAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "LastActivityAt") + return + } + z.UserId, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "UserId") + return + } + z.DeviceId, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "DeviceId") + return + } + z.Roles, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + z.IsOAuth, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "IsOAuth") + return + } + z.ExpiredNotify, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "ExpiredNotify") + return + } + var zb0002 uint32 + zb0002, err = dc.ReadMapHeader() 
+ if err != nil { + err = msgp.WrapError(err, "Props") + return + } + if z.Props == nil { + z.Props = make(StringMap, zb0002) + } else if len(z.Props) > 0 { + for key := range z.Props { + delete(z.Props, key) + } + } + for zb0002 > 0 { + zb0002-- + var za0001 string + var za0002 string + za0001, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + za0002, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Props", za0001) + return + } + z.Props[za0001] = za0002 + } + var zb0003 uint32 + zb0003, err = dc.ReadArrayHeader() + if err != nil { + err = msgp.WrapError(err, "TeamMembers") + return + } + if cap(z.TeamMembers) >= int(zb0003) { + z.TeamMembers = (z.TeamMembers)[:zb0003] + } else { + z.TeamMembers = make([]*TeamMember, zb0003) + } + for za0003 := range z.TeamMembers { + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, "TeamMembers", za0003) + return + } + z.TeamMembers[za0003] = nil + } else { + if z.TeamMembers[za0003] == nil { + z.TeamMembers[za0003] = new(TeamMember) + } + err = z.TeamMembers[za0003].DecodeMsg(dc) + if err != nil { + err = msgp.WrapError(err, "TeamMembers", za0003) + return + } + } + } + z.Local, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "Local") + return + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z *Session) EncodeMsg(en *msgp.Writer) (err error) { + // array header, size 13 + err = en.Append(0x9d) + if err != nil { + return + } + err = en.WriteString(z.Id) + if err != nil { + err = msgp.WrapError(err, "Id") + return + } + err = en.WriteString(z.Token) + if err != nil { + err = msgp.WrapError(err, "Token") + return + } + err = en.WriteInt64(z.CreateAt) + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + err = en.WriteInt64(z.ExpiresAt) + if err != nil { + err = msgp.WrapError(err, "ExpiresAt") + return + } + err = en.WriteInt64(z.LastActivityAt) + if err != nil { + err = 
msgp.WrapError(err, "LastActivityAt") + return + } + err = en.WriteString(z.UserId) + if err != nil { + err = msgp.WrapError(err, "UserId") + return + } + err = en.WriteString(z.DeviceId) + if err != nil { + err = msgp.WrapError(err, "DeviceId") + return + } + err = en.WriteString(z.Roles) + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + err = en.WriteBool(z.IsOAuth) + if err != nil { + err = msgp.WrapError(err, "IsOAuth") + return + } + err = en.WriteBool(z.ExpiredNotify) + if err != nil { + err = msgp.WrapError(err, "ExpiredNotify") + return + } + err = en.WriteMapHeader(uint32(len(z.Props))) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + for za0001, za0002 := range z.Props { + err = en.WriteString(za0001) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + err = en.WriteString(za0002) + if err != nil { + err = msgp.WrapError(err, "Props", za0001) + return + } + } + err = en.WriteArrayHeader(uint32(len(z.TeamMembers))) + if err != nil { + err = msgp.WrapError(err, "TeamMembers") + return + } + for za0003 := range z.TeamMembers { + if z.TeamMembers[za0003] == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = z.TeamMembers[za0003].EncodeMsg(en) + if err != nil { + err = msgp.WrapError(err, "TeamMembers", za0003) + return + } + } + } + err = en.WriteBool(z.Local) + if err != nil { + err = msgp.WrapError(err, "Local") + return + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z *Session) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // array header, size 13 + o = append(o, 0x9d) + o = msgp.AppendString(o, z.Id) + o = msgp.AppendString(o, z.Token) + o = msgp.AppendInt64(o, z.CreateAt) + o = msgp.AppendInt64(o, z.ExpiresAt) + o = msgp.AppendInt64(o, z.LastActivityAt) + o = msgp.AppendString(o, z.UserId) + o = msgp.AppendString(o, z.DeviceId) + o = msgp.AppendString(o, z.Roles) + o = msgp.AppendBool(o, z.IsOAuth) + o = 
msgp.AppendBool(o, z.ExpiredNotify) + o = msgp.AppendMapHeader(o, uint32(len(z.Props))) + for za0001, za0002 := range z.Props { + o = msgp.AppendString(o, za0001) + o = msgp.AppendString(o, za0002) + } + o = msgp.AppendArrayHeader(o, uint32(len(z.TeamMembers))) + for za0003 := range z.TeamMembers { + if z.TeamMembers[za0003] == nil { + o = msgp.AppendNil(o) + } else { + o, err = z.TeamMembers[za0003].MarshalMsg(o) + if err != nil { + err = msgp.WrapError(err, "TeamMembers", za0003) + return + } + } + } + o = msgp.AppendBool(o, z.Local) + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *Session) UnmarshalMsg(bts []byte) (o []byte, err error) { + var zb0001 uint32 + zb0001, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + if zb0001 != 13 { + err = msgp.ArrayError{Wanted: 13, Got: zb0001} + return + } + z.Id, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Id") + return + } + z.Token, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Token") + return + } + z.CreateAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + z.ExpiresAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "ExpiresAt") + return + } + z.LastActivityAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastActivityAt") + return + } + z.UserId, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "UserId") + return + } + z.DeviceId, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "DeviceId") + return + } + z.Roles, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + z.IsOAuth, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "IsOAuth") + return + } + z.ExpiredNotify, bts, err = 
msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "ExpiredNotify") + return + } + var zb0002 uint32 + zb0002, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + if z.Props == nil { + z.Props = make(StringMap, zb0002) + } else if len(z.Props) > 0 { + for key := range z.Props { + delete(z.Props, key) + } + } + for zb0002 > 0 { + var za0001 string + var za0002 string + zb0002-- + za0001, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + za0002, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Props", za0001) + return + } + z.Props[za0001] = za0002 + } + var zb0003 uint32 + zb0003, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err, "TeamMembers") + return + } + if cap(z.TeamMembers) >= int(zb0003) { + z.TeamMembers = (z.TeamMembers)[:zb0003] + } else { + z.TeamMembers = make([]*TeamMember, zb0003) + } + for za0003 := range z.TeamMembers { + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.TeamMembers[za0003] = nil + } else { + if z.TeamMembers[za0003] == nil { + z.TeamMembers[za0003] = new(TeamMember) + } + bts, err = z.TeamMembers[za0003].UnmarshalMsg(bts) + if err != nil { + err = msgp.WrapError(err, "TeamMembers", za0003) + return + } + } + } + z.Local, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Local") + return + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *Session) Msgsize() (s int) { + s = 1 + msgp.StringPrefixSize + len(z.Id) + msgp.StringPrefixSize + len(z.Token) + msgp.Int64Size + msgp.Int64Size + msgp.Int64Size + msgp.StringPrefixSize + len(z.UserId) + msgp.StringPrefixSize + len(z.DeviceId) + msgp.StringPrefixSize + len(z.Roles) + msgp.BoolSize + msgp.BoolSize + msgp.MapHeaderSize + 
if z.Props != nil { + for za0001, za0002 := range z.Props { + _ = za0002 + s += msgp.StringPrefixSize + len(za0001) + msgp.StringPrefixSize + len(za0002) + } + } + s += msgp.ArrayHeaderSize + for za0003 := range z.TeamMembers { + if z.TeamMembers[za0003] == nil { + s += msgp.NilSize + } else { + s += z.TeamMembers[za0003].Msgsize() + } + } + s += msgp.BoolSize + return +} + +// DecodeMsg implements msgp.Decodable +func (z *StringMap) DecodeMsg(dc *msgp.Reader) (err error) { + var zb0003 uint32 + zb0003, err = dc.ReadMapHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + if (*z) == nil { + (*z) = make(StringMap, zb0003) + } else if len((*z)) > 0 { + for key := range *z { + delete((*z), key) + } + } + for zb0003 > 0 { + zb0003-- + var zb0001 string + var zb0002 string + zb0001, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err) + return + } + zb0002, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + (*z)[zb0001] = zb0002 + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z StringMap) EncodeMsg(en *msgp.Writer) (err error) { + err = en.WriteMapHeader(uint32(len(z))) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0004, zb0005 := range z { + err = en.WriteString(zb0004) + if err != nil { + err = msgp.WrapError(err) + return + } + err = en.WriteString(zb0005) + if err != nil { + err = msgp.WrapError(err, zb0004) + return + } + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z StringMap) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + o = msgp.AppendMapHeader(o, uint32(len(z))) + for zb0004, zb0005 := range z { + o = msgp.AppendString(o, zb0004) + o = msgp.AppendString(o, zb0005) + } + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *StringMap) UnmarshalMsg(bts []byte) (o []byte, err error) { + var zb0003 uint32 + zb0003, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = 
msgp.WrapError(err) + return + } + if (*z) == nil { + (*z) = make(StringMap, zb0003) + } else if len((*z)) > 0 { + for key := range *z { + delete((*z), key) + } + } + for zb0003 > 0 { + var zb0001 string + var zb0002 string + zb0003-- + zb0001, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + zb0002, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + (*z)[zb0001] = zb0002 + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z StringMap) Msgsize() (s int) { + s = msgp.MapHeaderSize + if z != nil { + for zb0004, zb0005 := range z { + _ = zb0005 + s += msgp.StringPrefixSize + len(zb0004) + msgp.StringPrefixSize + len(zb0005) + } + } + return +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/shared_channel.go b/vendor/github.com/mattermost/mattermost/server/public/model/shared_channel.go new file mode 100644 index 00000000..cccba8ea --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/shared_channel.go @@ -0,0 +1,361 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "net/http" + "unicode/utf8" + + "github.com/pkg/errors" +) + +const ( + UserPropsKeyRemoteUsername = "RemoteUsername" + UserPropsKeyRemoteEmail = "RemoteEmail" + UserPropsKeyOriginalRemoteId = "OriginalRemoteId" + UserOriginalRemoteIdUnknown = "UNKNOWN" +) + +var ( + ErrChannelAlreadyShared = errors.New("channel is already shared") + ErrChannelHomedOnRemote = errors.New("channel is homed on a remote cluster") + ErrChannelAlreadyExists = errors.New("channel already exists") +) + +// SharedChannel represents a channel that can be synchronized with a remote cluster. 
+// If "home" is true, then the shared channel is homed locally and "SharedChannelRemote" +// table contains the remote clusters that have been invited. +// If "home" is false, then the shared channel is homed remotely, and "RemoteId" +// field points to the remote cluster connection in "RemoteClusters" table. +type SharedChannel struct { + ChannelId string `json:"id"` + TeamId string `json:"team_id"` + Home bool `json:"home"` + ReadOnly bool `json:"readonly"` + ShareName string `json:"name"` + ShareDisplayName string `json:"display_name"` + SharePurpose string `json:"purpose"` + ShareHeader string `json:"header"` + CreatorId string `json:"creator_id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + RemoteId string `json:"remote_id,omitempty"` // if not "home" + Type ChannelType `db:"-"` +} + +func (sc *SharedChannel) IsValid() *AppError { + if !IsValidId(sc.ChannelId) { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.id.app_error", nil, "ChannelId="+sc.ChannelId, http.StatusBadRequest) + } + + if sc.Type != ChannelTypeDirect && sc.Type != ChannelTypeGroup && !IsValidId(sc.TeamId) { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.id.app_error", nil, "TeamId="+sc.TeamId, http.StatusBadRequest) + } + + if sc.CreateAt == 0 { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.create_at.app_error", nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if sc.UpdateAt == 0 { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.update_at.app_error", nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if utf8.RuneCountInString(sc.ShareDisplayName) > ChannelDisplayNameMaxRunes { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.display_name.app_error", nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if !IsValidChannelIdentifier(sc.ShareName) { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.1_or_more.app_error", 
nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if utf8.RuneCountInString(sc.ShareHeader) > ChannelHeaderMaxRunes { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.header.app_error", nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if utf8.RuneCountInString(sc.SharePurpose) > ChannelPurposeMaxRunes { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.purpose.app_error", nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if !IsValidId(sc.CreatorId) { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.creator_id.app_error", nil, "CreatorId="+sc.CreatorId, http.StatusBadRequest) + } + + if !sc.Home { + if !IsValidId(sc.RemoteId) { + return NewAppError("SharedChannel.IsValid", "model.channel.is_valid.id.app_error", nil, "RemoteId="+sc.RemoteId, http.StatusBadRequest) + } + } + return nil +} + +func (sc *SharedChannel) PreSave() { + sc.ShareName = SanitizeUnicode(sc.ShareName) + sc.ShareDisplayName = SanitizeUnicode(sc.ShareDisplayName) + + sc.CreateAt = GetMillis() + sc.UpdateAt = sc.CreateAt +} + +func (sc *SharedChannel) PreUpdate() { + sc.UpdateAt = GetMillis() + sc.ShareName = SanitizeUnicode(sc.ShareName) + sc.ShareDisplayName = SanitizeUnicode(sc.ShareDisplayName) +} + +// SharedChannelRemote represents a remote cluster that has been invited +// to a shared channel. 
+type SharedChannelRemote struct { + Id string `json:"id"` + ChannelId string `json:"channel_id"` + CreatorId string `json:"creator_id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + IsInviteAccepted bool `json:"is_invite_accepted"` + IsInviteConfirmed bool `json:"is_invite_confirmed"` + RemoteId string `json:"remote_id"` + LastPostUpdateAt int64 `json:"last_post_update_at"` + LastPostUpdateID string `json:"last_post_id"` + LastPostCreateAt int64 `json:"last_post_create_at"` + LastPostCreateID string `json:"last_post_create_id"` + LastMembersSyncAt int64 `json:"last_members_sync_at"` +} + +func (sc *SharedChannelRemote) IsValid() *AppError { + if !IsValidId(sc.Id) { + return NewAppError("SharedChannelRemote.IsValid", "model.channel.is_valid.id.app_error", nil, "Id="+sc.Id, http.StatusBadRequest) + } + + if !IsValidId(sc.ChannelId) { + return NewAppError("SharedChannelRemote.IsValid", "model.channel.is_valid.id.app_error", nil, "ChannelId="+sc.ChannelId, http.StatusBadRequest) + } + + if sc.CreateAt == 0 { + return NewAppError("SharedChannelRemote.IsValid", "model.channel.is_valid.create_at.app_error", nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if sc.UpdateAt == 0 { + return NewAppError("SharedChannelRemote.IsValid", "model.channel.is_valid.update_at.app_error", nil, "id="+sc.ChannelId, http.StatusBadRequest) + } + + if !IsValidId(sc.CreatorId) { + return NewAppError("SharedChannelRemote.IsValid", "model.channel.is_valid.creator_id.app_error", nil, "id="+sc.CreatorId, http.StatusBadRequest) + } + return nil +} + +func (sc *SharedChannelRemote) PreSave() { + if sc.Id == "" { + sc.Id = NewId() + } + sc.CreateAt = GetMillis() + sc.UpdateAt = sc.CreateAt +} + +func (sc *SharedChannelRemote) PreUpdate() { + sc.UpdateAt = GetMillis() +} + +type SharedChannelRemoteStatus struct { + ChannelId string `json:"channel_id"` + DisplayName string `json:"display_name"` + SiteURL string `json:"site_url"` + 
LastPingAt int64 `json:"last_ping_at"` + NextSyncAt int64 `json:"next_sync_at"` + ReadOnly bool `json:"readonly"` + IsInviteAccepted bool `json:"is_invite_accepted"` + Token string `json:"token"` +} + +// SharedChannelUser stores a lastSyncAt timestamp on behalf of a remote cluster for +// each user that has been synchronized. +type SharedChannelUser struct { + Id string `json:"id"` + UserId string `json:"user_id"` + ChannelId string `json:"channel_id"` + RemoteId string `json:"remote_id"` + CreateAt int64 `json:"create_at"` + LastSyncAt int64 `json:"last_sync_at"` + LastMembershipSyncAt int64 `json:"last_membership_sync_at"` +} + +func (scu *SharedChannelUser) PreSave() { + scu.Id = NewId() + scu.CreateAt = GetMillis() +} + +func (scu *SharedChannelUser) IsValid() *AppError { + if !IsValidId(scu.Id) { + return NewAppError("SharedChannelUser.IsValid", "model.channel.is_valid.id.app_error", nil, "Id="+scu.Id, http.StatusBadRequest) + } + + if !IsValidId(scu.UserId) { + return NewAppError("SharedChannelUser.IsValid", "model.channel.is_valid.id.app_error", nil, "UserId="+scu.UserId, http.StatusBadRequest) + } + + if !IsValidId(scu.ChannelId) { + return NewAppError("SharedChannelUser.IsValid", "model.channel.is_valid.id.app_error", nil, "ChannelId="+scu.ChannelId, http.StatusBadRequest) + } + + if !IsValidId(scu.RemoteId) { + return NewAppError("SharedChannelUser.IsValid", "model.channel.is_valid.id.app_error", nil, "RemoteId="+scu.RemoteId, http.StatusBadRequest) + } + + if scu.CreateAt == 0 { + return NewAppError("SharedChannelUser.IsValid", "model.channel.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + return nil +} + +type GetUsersForSyncFilter struct { + CheckProfileImage bool + ChannelID string + Limit uint64 +} + +// SharedChannelAttachment stores a lastSyncAt timestamp on behalf of a remote cluster for +// each file attachment that has been synchronized. 
+type SharedChannelAttachment struct { + Id string `json:"id"` + FileId string `json:"file_id"` + RemoteId string `json:"remote_id"` + CreateAt int64 `json:"create_at"` + LastSyncAt int64 `json:"last_sync_at"` +} + +func (scf *SharedChannelAttachment) PreSave() { + if scf.Id == "" { + scf.Id = NewId() + } + if scf.CreateAt == 0 { + scf.CreateAt = GetMillis() + scf.LastSyncAt = scf.CreateAt + } else { + scf.LastSyncAt = GetMillis() + } +} + +func (scf *SharedChannelAttachment) IsValid() *AppError { + if !IsValidId(scf.Id) { + return NewAppError("SharedChannelAttachment.IsValid", "model.channel.is_valid.id.app_error", nil, "Id="+scf.Id, http.StatusBadRequest) + } + + if !IsValidId(scf.FileId) { + return NewAppError("SharedChannelAttachment.IsValid", "model.channel.is_valid.id.app_error", nil, "FileId="+scf.FileId, http.StatusBadRequest) + } + + if !IsValidId(scf.RemoteId) { + return NewAppError("SharedChannelAttachment.IsValid", "model.channel.is_valid.id.app_error", nil, "RemoteId="+scf.RemoteId, http.StatusBadRequest) + } + + if scf.CreateAt == 0 { + return NewAppError("SharedChannelAttachment.IsValid", "model.channel.is_valid.create_at.app_error", nil, "", http.StatusBadRequest) + } + return nil +} + +type SharedChannelFilterOpts struct { + TeamId string + CreatorId string + MemberId string + ExcludeHome bool + ExcludeRemote bool +} + +type SharedChannelRemoteFilterOpts struct { + ChannelId string + RemoteId string + IncludeUnconfirmed bool + ExcludeConfirmed bool + ExcludeHome bool + ExcludeRemote bool + IncludeDeleted bool +} + +// MembershipChangeMsg represents a change in channel membership +type MembershipChangeMsg struct { + ChannelId string `json:"channel_id"` + UserId string `json:"user_id"` + IsAdd bool `json:"is_add"` + RemoteId string `json:"remote_id"` + ChangeTime int64 `json:"change_time"` +} + +// SyncMsg represents a change in content (post add/edit/delete, reaction add/remove, users). 
+// It is sent to remote clusters as the payload of a `RemoteClusterMsg`. +type SyncMsg struct { + Id string `json:"id"` + ChannelId string `json:"channel_id"` + Users map[string]*User `json:"users,omitempty"` + Posts []*Post `json:"posts,omitempty"` + Reactions []*Reaction `json:"reactions,omitempty"` + Statuses []*Status `json:"statuses,omitempty"` + MembershipChanges []*MembershipChangeMsg `json:"membership_changes,omitempty"` + Acknowledgements []*PostAcknowledgement `json:"acknowledgements,omitempty"` + MentionTransforms map[string]string `json:"mention_transforms,omitempty"` +} + +func NewSyncMsg(channelID string) *SyncMsg { + return &SyncMsg{ + Id: NewId(), + ChannelId: channelID, + } +} + +func (sm *SyncMsg) ToJSON() ([]byte, error) { + b, err := json.Marshal(sm) + if err != nil { + return nil, err + } + return b, nil +} + +func (sm *SyncMsg) String() string { + json, err := sm.ToJSON() + if err != nil { + return "" + } + return string(json) +} + +// SyncResponse represents the response to a synchronization event +type SyncResponse struct { + UsersLastUpdateAt int64 `json:"users_last_update_at"` + UserErrors []string `json:"user_errors"` + UsersSyncd []string `json:"users_syncd"` + + PostsLastUpdateAt int64 `json:"posts_last_update_at"` + PostErrors []string `json:"post_errors"` + + ReactionsLastUpdateAt int64 `json:"reactions_last_update_at"` + ReactionErrors []string `json:"reaction_errors"` + + AcknowledgementsLastUpdateAt int64 `json:"acknowledgements_last_update_at"` + AcknowledgementErrors []string `json:"acknowledgement_errors"` + + StatusErrors []string `json:"status_errors"` // user IDs for which the status sync failed +} + +// RegisterPluginOpts is passed by plugins to the `RegisterPluginForSharedChannels` plugin API +// to provide options for registering as a shared channels remote. 
+type RegisterPluginOpts struct { + Displayname string // a displayname used in status reports + PluginID string // id of this plugin registering + CreatorID string // id of the user/bot registering + AutoShareDMs bool // when true, all DMs are automatically shared to this remote + AutoInvited bool // when true, the plugin is automatically invited and sync'd with all shared channels. +} + +// GetOptionFlags returns a Bitmask of option flags as specified by the boolean options. +func (po RegisterPluginOpts) GetOptionFlags() Bitmask { + var flags Bitmask + if po.AutoShareDMs { + flags |= BitflagOptionAutoShareDMs + } + if po.AutoInvited { + flags |= BitflagOptionAutoInvited + } + return flags +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/slack_attachment.go b/vendor/github.com/mattermost/mattermost/server/public/model/slack_attachment.go new file mode 100644 index 00000000..53523be4 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/slack_attachment.go @@ -0,0 +1,292 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "reflect" + "regexp" + "slices" + + "github.com/hashicorp/go-multierror" +) + +var ( + linkWithTextRegex = regexp.MustCompile(`<([^<\|]+)\|([^>]+)>`) + hexColorRegex = regexp.MustCompile(`^#[0-9a-fA-F]{6}$`) +) + +type SlackAttachment struct { + Id int64 `json:"id"` + Fallback string `json:"fallback"` + Color string `json:"color"` + Pretext string `json:"pretext"` + AuthorName string `json:"author_name"` + AuthorLink string `json:"author_link"` + AuthorIcon string `json:"author_icon"` + Title string `json:"title"` + TitleLink string `json:"title_link"` + Text string `json:"text"` + Fields []*SlackAttachmentField `json:"fields"` + ImageURL string `json:"image_url"` + ThumbURL string `json:"thumb_url"` + Footer string `json:"footer"` + FooterIcon string `json:"footer_icon"` + Timestamp any `json:"ts"` // This is either a string or an int64 + Actions []*PostAction `json:"actions,omitempty"` +} + +func (s *SlackAttachment) IsValid() error { + var multiErr *multierror.Error + + if s.Color != "" { + validStyles := []string{"good", "warning", "danger"} + // If not a predefined style, check if it's a hex color + if !slices.Contains(validStyles, s.Color) && !hexColorRegex.MatchString(s.Color) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid style '%s' - must be one of [good, warning, danger] or a hex color", s.Color)) + } + } + + if s.AuthorLink != "" { + if s.AuthorName == "" { + multiErr = multierror.Append(multiErr, fmt.Errorf("author link cannot be set without author name")) + } + + if !IsValidHTTPURL(s.AuthorLink) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid author link URL")) + } + } + + if s.AuthorIcon != "" && !IsValidHTTPURL(s.AuthorIcon) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid author icon URL")) + } + + if s.TitleLink != "" { + if s.Title == "" { + multiErr = multierror.Append(multiErr, fmt.Errorf("title link cannot be set without title")) + } + + if 
!IsValidHTTPURL(s.TitleLink) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid title link URL")) + } + } + + for _, field := range s.Fields { + if err := field.IsValid(); err != nil { + multiErr = multierror.Append(multiErr, err) + } + } + + if s.ImageURL != "" && !IsValidHTTPURL(s.ImageURL) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid image URL")) + } + + if s.ThumbURL != "" && !IsValidHTTPURL(s.ThumbURL) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid thumb URL")) + } + + if s.FooterIcon != "" && !IsValidHTTPURL(s.FooterIcon) { + multiErr = multierror.Append(multiErr, fmt.Errorf("invalid footer icon URL")) + } + + // Validate timestamp is either string or int64 + if s.Timestamp != nil { + switch s.Timestamp.(type) { + case string, int64: + // Valid types + default: + multiErr = multierror.Append(multiErr, fmt.Errorf("timestamp must be either a string or int64")) + } + } + + for i, action := range s.Actions { + if err := action.IsValid(); err != nil { + multiErr = multierror.Append(multiErr, multierror.Prefix(err, fmt.Sprintf("action at index %d is invalid:", i))) + } + } + + return multiErr.ErrorOrNil() +} + +func (s *SlackAttachment) Equals(input *SlackAttachment) bool { + // Direct comparison of simple types + + if s.Id != input.Id { + return false + } + + if s.Fallback != input.Fallback { + return false + } + + if s.Color != input.Color { + return false + } + + if s.Pretext != input.Pretext { + return false + } + + if s.AuthorName != input.AuthorName { + return false + } + + if s.AuthorLink != input.AuthorLink { + return false + } + + if s.AuthorIcon != input.AuthorIcon { + return false + } + + if s.Title != input.Title { + return false + } + + if s.TitleLink != input.TitleLink { + return false + } + + if s.Text != input.Text { + return false + } + + if s.ImageURL != input.ImageURL { + return false + } + + if s.ThumbURL != input.ThumbURL { + return false + } + + if s.Footer != input.Footer { + return false + } + + 
if s.FooterIcon != input.FooterIcon { + return false + } + + // Compare length & slice values of fields + if len(s.Fields) != len(input.Fields) { + return false + } + + for j := range s.Fields { + if !s.Fields[j].Equals(input.Fields[j]) { + return false + } + } + + // Compare length & slice values of actions + if len(s.Actions) != len(input.Actions) { + return false + } + + for j := range s.Actions { + if !s.Actions[j].Equals(input.Actions[j]) { + return false + } + } + + return s.Timestamp == input.Timestamp +} + +type SlackAttachmentField struct { + Title string `json:"title"` + Value any `json:"value"` + Short SlackCompatibleBool `json:"short"` +} + +func (s *SlackAttachmentField) IsValid() error { + var multiErr *multierror.Error + + if s.Value != nil { + switch s.Value.(type) { + case string, int: + // Valid types + default: + multiErr = multierror.Append(multiErr, fmt.Errorf("value must be either a string or int")) + } + } + + return multiErr.ErrorOrNil() +} + +func (s *SlackAttachmentField) Equals(input *SlackAttachmentField) bool { + if s.Title != input.Title { + return false + } + + if reflect.ValueOf(input.Value).Type().Comparable() && reflect.ValueOf(s.Value).Type().Comparable() && reflect.ValueOf(input.Value).Type() == reflect.ValueOf(s.Value).Type() { + if s.Value != input.Value { + return false + } + } else { + if !reflect.DeepEqual(s.Value, input.Value) { + return false + } + } + + return s.Short == input.Short +} + +func StringifySlackFieldValue(a []*SlackAttachment) []*SlackAttachment { + var nonNilAttachments []*SlackAttachment + for _, attachment := range a { + if attachment == nil { + continue + } + nonNilAttachments = append(nonNilAttachments, attachment) + + var nonNilFields []*SlackAttachmentField + for _, field := range attachment.Fields { + if field == nil { + continue + } + nonNilFields = append(nonNilFields, field) + + if field.Value != nil { + // Ensure the value is set to a string if it is set + field.Value = fmt.Sprintf("%v", 
field.Value) + } + } + attachment.Fields = nonNilFields + } + return nonNilAttachments +} + +// This method only parses and processes the attachments, +// all else should be set in the post which is passed +func ParseSlackAttachment(post *Post, attachments []*SlackAttachment) { + if post.Type == "" { + post.Type = PostTypeSlackAttachment + } + + postAttachments := []*SlackAttachment{} + + for _, attachment := range attachments { + if attachment == nil { + continue + } + + attachment.Text = ParseSlackLinksToMarkdown(attachment.Text) + attachment.Pretext = ParseSlackLinksToMarkdown(attachment.Pretext) + + for _, field := range attachment.Fields { + if field == nil { + continue + } + if value, ok := field.Value.(string); ok { + field.Value = ParseSlackLinksToMarkdown(value) + } + } + postAttachments = append(postAttachments, attachment) + } + post.AddProp(PostPropsAttachments, postAttachments) +} + +func ParseSlackLinksToMarkdown(text string) string { + return linkWithTextRegex.ReplaceAllString(text, "[${2}](${1})") +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/slack_compatibility.go b/vendor/github.com/mattermost/mattermost/server/public/model/slack_compatibility.go new file mode 100644 index 00000000..2d3e2878 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/slack_compatibility.go @@ -0,0 +1,30 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "strings" +) + +// SlackCompatibleBool is an alias for bool that implements json.Unmarshaler +type SlackCompatibleBool bool + +// UnmarshalJSON implements json.Unmarshaler +// +// Slack allows bool values to be represented as strings ("true"/"false") or +// literals (true/false). To maintain compatibility, we define an Unmarshaler +// that supports both. 
+func (b *SlackCompatibleBool) UnmarshalJSON(data []byte) error { + value := strings.ToLower(string(data)) + if value == "true" || value == `"true"` { + *b = true + } else if value == "false" || value == `"false"` { + *b = false + } else { + return fmt.Errorf("unmarshal: unable to convert %s to bool", data) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/status.go b/vendor/github.com/mattermost/mattermost/server/public/model/status.go new file mode 100644 index 00000000..6c400063 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/status.go @@ -0,0 +1,63 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "time" +) + +const ( + StatusOutOfOffice = "ooo" + StatusOffline = "offline" + StatusAway = "away" + StatusDnd = "dnd" + StatusOnline = "online" + StatusCacheSize = SessionCacheSize + StatusChannelTimeout = 20000 // 20 seconds + StatusMinUpdateTime = 120000 // 2 minutes + + // DNDExpiryInterval is how often the job to expire temporary DND statuses runs. + DNDExpiryInterval = 1 * time.Minute +) + +type Status struct { + UserId string `json:"user_id"` + Status string `json:"status"` + Manual bool `json:"manual"` + LastActivityAt int64 `json:"last_activity_at"` + ActiveChannel string `json:"active_channel,omitempty" db:"-"` + + // DNDEndTime is the time that the user's DND status will expire. Unlike other timestamps in Mattermost, this value + // is in seconds instead of milliseconds. 
+ DNDEndTime int64 `json:"dnd_end_time"` + + PrevStatus string `json:"-"` +} + +func (s *Status) ToJSON() ([]byte, error) { + sCopy := *s + sCopy.ActiveChannel = "" + return json.Marshal(sCopy) +} + +func StatusListToJSON(u []*Status) ([]byte, error) { + list := make([]Status, len(u)) + for i, s := range u { + list[i] = *s + list[i].ActiveChannel = "" + } + return json.Marshal(list) +} + +func StatusMapToInterfaceMap(statusMap map[string]*Status) map[string]any { + interfaceMap := map[string]any{} + for _, s := range statusMap { + // Omitted statues mean offline + if s.Status != StatusOffline { + interfaceMap[s.UserId] = s.Status + } + } + return interfaceMap +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/suggest_command.go b/vendor/github.com/mattermost/mattermost/server/public/model/suggest_command.go new file mode 100644 index 00000000..7fb045fc --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/suggest_command.go @@ -0,0 +1,9 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type SuggestCommand struct { + Suggestion string `json:"suggestion"` + Description string `json:"description"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/support_packet.go b/vendor/github.com/mattermost/mattermost/server/public/model/support_packet.go new file mode 100644 index 00000000..8f8dddae --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/support_packet.go @@ -0,0 +1,181 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "encoding/json" + "io" +) + +const ( + CurrentSupportPacketVersion = 2 + SupportPacketErrorFile = "warning.txt" +) + +type SupportPacketDiagnostics struct { + Version int `yaml:"version"` + + License struct { + Company string `yaml:"company"` + Users int `yaml:"users"` + SkuShortName string `yaml:"sku_short_name"` + IsTrial bool `yaml:"is_trial,omitempty"` + IsGovSKU bool `yaml:"is_gov_sku,omitempty"` + } `yaml:"license"` + + Server struct { + OS string `yaml:"os"` + Architecture string `yaml:"architecture"` + Hostname string `yaml:"hostname"` + Version string `yaml:"version"` + BuildHash string `yaml:"build_hash"` + InstallationType string `yaml:"installation_type"` + } `yaml:"server"` + + Config struct { + Source string `yaml:"store_type"` + } `yaml:"config"` + + Database struct { + Type string `yaml:"type"` + Version string `yaml:"version"` + SchemaVersion string `yaml:"schema_version"` + MasterConnectios int `yaml:"master_connections"` + ReplicaConnectios int `yaml:"replica_connections"` + SearchConnections int `yaml:"search_connections"` + } `yaml:"database"` + + FileStore struct { + Status string `yaml:"file_status"` + Error string `yaml:"erorr,omitempty"` + Driver string `yaml:"file_driver"` + } `yaml:"file_store"` + + Websocket struct { + Connections int `yaml:"connections"` + } `yaml:"websocket"` + + Cluster struct { + ID string `yaml:"id"` + NumberOfNodes int `yaml:"number_of_nodes"` + } `yaml:"cluster"` + + LDAP struct { + Status string `yaml:"status,omitempty"` + Error string `yaml:"error,omitempty"` + ServerName string `yaml:"server_name,omitempty"` + ServerVersion string `yaml:"server_version,omitempty"` + } `yaml:"ldap"` + + SAML struct { + ProviderType string `yaml:"provider_type,omitempty"` + } `yaml:"saml"` + + ElasticSearch struct { + Backend string `yaml:"backend,omitempty"` + ServerVersion string `yaml:"server_version,omitempty"` + ServerPlugins []string `yaml:"server_plugins,omitempty"` + Error string 
`yaml:"error,omitempty"` + } `yaml:"elastic"` +} + +type SupportPacketStats struct { + RegisteredUsers int64 `yaml:"registered_users"` + ActiveUsers int64 `yaml:"active_users"` + DailyActiveUsers int64 `yaml:"daily_active_users"` + MonthlyActiveUsers int64 `yaml:"monthly_active_users"` + DeactivatedUsers int64 `yaml:"deactivated_users"` + Guests int64 `yaml:"guests"` + BotAccounts int64 `yaml:"bot_accounts"` + Posts int64 `yaml:"posts"` + Channels int64 `yaml:"channels"` + Teams int64 `yaml:"teams"` + SlashCommands int64 `yaml:"slash_commands"` + IncomingWebhooks int64 `yaml:"incoming_webhooks"` + OutgoingWebhooks int64 `yaml:"outgoing_webhooks"` +} + +// SupportPacketJobList contains the list of latest run enterprise job runs. +// It is included in the Support Packet. +type SupportPacketJobList struct { + LDAPSyncJobs []*Job `yaml:"ldap_sync_jobs"` + DataRetentionJobs []*Job `yaml:"data_retention_jobs"` + MessageExportJobs []*Job `yaml:"message_export_jobs"` + ElasticPostIndexingJobs []*Job `yaml:"elastic_post_indexing_jobs"` + ElasticPostAggregationJobs []*Job `yaml:"elastic_post_aggregation_jobs"` + MigrationJobs []*Job `yaml:"migration_jobs"` +} + +// SupportPacketPermissionInfo contains the list of schemes and the list of roles. +// It is included in the Support Packet. +type SupportPacketPermissionInfo struct { + Roles []*Role `yaml:"roles"` + Schemes []*Scheme `yaml:"schemes"` +} + +// SupportPacketConfig contains the Mattermost configuration. In contrast to [Config], it also contains the list of Feature Flags. +// It is included in the Support Packet. +type SupportPacketConfig struct { + *Config + FeatureFlags FeatureFlags `json:"FeatureFlags"` +} + +// SupportPacketPluginList contains the list of enabled and disabled plugins. +// It is included in the Support Packet. 
+type SupportPacketPluginList struct { + Enabled []Manifest `json:"enabled"` + Disabled []Manifest `json:"disabled"` +} + +// SupportPacketDatabaseSchema contains the database schema information. +// It is included in the Support Packet. +type SupportPacketDatabaseSchema struct { + DatabaseCollation string `yaml:"database_collation,omitempty"` + DatabaseEncoding string `yaml:"database_encoding,omitempty"` + Tables []DatabaseTable `yaml:"tables"` +} + +// DatabaseTable represents a table in the database schema. +type DatabaseTable struct { + Name string `yaml:"name"` + Collation string `yaml:"collation,omitempty"` + Options map[string]string `yaml:"options,omitempty"` + Columns []DatabaseColumn `yaml:"columns"` + Indexes []DatabaseIndex `yaml:"indexes,omitempty"` +} + +// DatabaseColumn represents a column in a database table. +type DatabaseColumn struct { + Name string `yaml:"name"` + DataType string `yaml:"data_type"` + MaxLength int64 `yaml:"max_length,omitempty"` + IsNullable bool `yaml:"is_nullable"` +} + +// DatabaseIndex represents an index in a database table. +type DatabaseIndex struct { + Name string `yaml:"name"` + Definition string `yaml:"definition"` +} + +type FileData struct { + Filename string + Body []byte +} + +type SupportPacketOptions struct { + IncludeLogs bool `json:"include_logs"` // IncludeLogs is the option to include server logs + PluginPackets []string `json:"plugin_packets"` // PluginPackets is a list of pluginids to call hooks +} + +// SupportPacketOptionsFromReader decodes a json-encoded request from the given io.Reader. 
+func SupportPacketOptionsFromReader(reader io.Reader) (*SupportPacketOptions, error) { + var r *SupportPacketOptions + err := json.NewDecoder(reader).Decode(&r) + if err != nil { + return nil, err + } + + return r, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/switch_request.go b/vendor/github.com/mattermost/mattermost/server/public/model/switch_request.go new file mode 100644 index 00000000..12853f28 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/switch_request.go @@ -0,0 +1,48 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type SwitchRequest struct { + CurrentService string `json:"current_service"` + NewService string `json:"new_service"` + Email string `json:"email"` + Password string `json:"password"` + NewPassword string `json:"new_password"` + MfaCode string `json:"mfa_code"` + LdapLoginId string `json:"ldap_id"` +} + +func (o *SwitchRequest) Auditable() map[string]any { + return map[string]any{ + "current_service": o.CurrentService, + "new_service": o.NewService, + "email": o.Email, + "ldap_login_id": o.LdapLoginId, + } +} + +func (o *SwitchRequest) EmailToOAuth() bool { + return o.CurrentService == UserAuthServiceEmail && + (o.NewService == UserAuthServiceSaml || + o.NewService == UserAuthServiceGitlab || + o.NewService == ServiceGoogle || + o.NewService == ServiceOffice365 || + o.NewService == ServiceOpenid) +} + +func (o *SwitchRequest) OAuthToEmail() bool { + return (o.CurrentService == UserAuthServiceSaml || + o.CurrentService == UserAuthServiceGitlab || + o.CurrentService == ServiceGoogle || + o.CurrentService == ServiceOffice365 || + o.CurrentService == ServiceOpenid) && o.NewService == UserAuthServiceEmail +} + +func (o *SwitchRequest) EmailToLdap() bool { + return o.CurrentService == UserAuthServiceEmail && o.NewService == UserAuthServiceLdap +} + +func (o *SwitchRequest) LdapToEmail() bool { + 
return o.CurrentService == UserAuthServiceLdap && o.NewService == UserAuthServiceEmail +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/system.go b/vendor/github.com/mattermost/mattermost/server/public/model/system.go new file mode 100644 index 00000000..f3a9a65e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/system.go @@ -0,0 +1,108 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "math/big" +) + +const ( + SystemServerId = "DiagnosticId" + SystemRanUnitTests = "RanUnitTests" + SystemLastSecurityTime = "LastSecurityTime" + SystemActiveLicenseId = "ActiveLicenseId" + SystemLastComplianceTime = "LastComplianceTime" + SystemAsymmetricSigningKeyKey = "AsymmetricSigningKey" + SystemPostActionCookieSecretKey = "PostActionCookieSecret" + SystemInstallationDateKey = "InstallationDate" + SystemOrganizationName = "OrganizationName" + SystemFirstAdminRole = "FirstAdminRole" + SystemFirstServerRunTimestampKey = "FirstServerRunTimestamp" + SystemClusterEncryptionKey = "ClusterEncryptionKey" + SystemPushProxyAuthToken = "PushProxyAuthToken" + SystemUpgradedFromTeId = "UpgradedFromTE" + SystemWarnMetricNumberOfTeams5 = "warn_metric_number_of_teams_5" + SystemWarnMetricNumberOfChannels50 = "warn_metric_number_of_channels_50" + SystemWarnMetricMfa = "warn_metric_mfa" + SystemWarnMetricEmailDomain = "warn_metric_email_domain" + SystemWarnMetricNumberOfActiveUsers100 = "warn_metric_number_of_active_users_100" + SystemWarnMetricNumberOfActiveUsers200 = "warn_metric_number_of_active_users_200" + SystemWarnMetricNumberOfActiveUsers300 = "warn_metric_number_of_active_users_300" + SystemWarnMetricNumberOfActiveUsers500 = "warn_metric_number_of_active_users_500" + SystemWarnMetricNumberOfPosts2m = "warn_metric_number_of_posts_2M" + SystemWarnMetricLastRunTimestampKey = "LastWarnMetricRunTimestamp" + SystemFirstAdminVisitMarketplace 
= "FirstAdminVisitMarketplace" + SystemFirstAdminSetupComplete = "FirstAdminSetupComplete" + SystemLastAccessiblePostTime = "LastAccessiblePostTime" + SystemLastAccessibleFileTime = "LastAccessibleFileTime" + SystemHostedPurchaseNeedsScreening = "HostedPurchaseNeedsScreening" + AwsMeteringReportInterval = 1 + AwsMeteringDimensionUsageHrs = "UsageHrs" + CloudRenewalEmail = "CloudRenewalEmail" +) + +const ( + WarnMetricStatusLimitReached = "true" + WarnMetricStatusRunonce = "runonce" + WarnMetricStatusAck = "ack" + WarnMetricStatusStorePrefix = "warn_metric_" + WarnMetricJobInterval = 24 * 7 + WarnMetricNumberOfActiveUsers25 = 25 + WarnMetricJobWaitTime = 1000 * 3600 * 24 * 7 // 7 days +) + +type System struct { + Name string `json:"name"` + Value string `json:"value"` +} + +type SystemPostActionCookieSecret struct { + Secret []byte `json:"key,omitempty"` +} + +type SystemAsymmetricSigningKey struct { + ECDSAKey *SystemECDSAKey `json:"ecdsa_key,omitempty"` +} + +type SystemECDSAKey struct { + Curve string `json:"curve"` + X *big.Int `json:"x"` + Y *big.Int `json:"y"` + D *big.Int `json:"d,omitempty"` +} + +// ServerBusyState provides serialization for app.Busy. +type ServerBusyState struct { + Busy bool `json:"busy"` + Expires int64 `json:"expires"` + ExpiresTS string `json:"expires_ts,omitempty"` +} + +type AppliedMigration struct { + Version int `json:"version"` + Name string `json:"name"` +} + +type LogFilter struct { + ServerNames []string `json:"server_names"` + LogLevels []string `json:"log_levels"` + DateFrom string `json:"date_from"` + DateTo string `json:"date_to"` +} + +type LogEntry struct { + Timestamp string + Level string +} + +// SystemPingOptions is the options for setting contents of the system ping +// response. +type SystemPingOptions struct { + // FullStatus allows server to set the detailed information about + // the system status. + FullStatus bool + // RestSemantics allows server to return 200 code even if the server + // status is unhealthy. 
+ RESTSemantics bool +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/team.go b/vendor/github.com/mattermost/mattermost/server/public/model/team.go new file mode 100644 index 00000000..9325ef22 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/team.go @@ -0,0 +1,293 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "net/http" + "regexp" + "strings" + "unicode/utf8" +) + +const ( + TeamOpen = "O" + TeamInvite = "I" + TeamAllowedDomainsMaxLength = 500 + TeamCompanyNameMaxLength = 64 + TeamDescriptionMaxLength = 255 + TeamDisplayNameMaxRunes = 64 + TeamEmailMaxLength = 128 + TeamNameMaxLength = 64 + TeamNameMinLength = 2 +) + +type Team struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UpdateAt int64 `json:"update_at"` + DeleteAt int64 `json:"delete_at"` + DisplayName string `json:"display_name"` + Name string `json:"name"` + Description string `json:"description"` + Email string `json:"email"` + Type string `json:"type"` + CompanyName string `json:"company_name"` + AllowedDomains string `json:"allowed_domains"` + InviteId string `json:"invite_id"` + AllowOpenInvite bool `json:"allow_open_invite"` + LastTeamIconUpdate int64 `json:"last_team_icon_update,omitempty"` + SchemeId *string `json:"scheme_id"` + GroupConstrained *bool `json:"group_constrained"` + PolicyID *string `json:"policy_id"` + CloudLimitsArchived bool `json:"cloud_limits_archived"` +} + +func (o *Team) Auditable() map[string]any { + return map[string]any{ + "id": o.Id, + "create_at": o.CreateAt, + "update_at": o.UpdateAt, + "delete_at": o.DeleteAt, + "type": o.Type, + "invite_id": o.InviteId, + "allow_open_invite": o.AllowOpenInvite, + "scheme_id": o.SchemeId, + "group_constrained": o.GroupConstrained, + "policy_id": o.PolicyID, + "cloud_limits_archived": o.CloudLimitsArchived, + } +} + +func (o *Team) LogClone() any { + 
return o.Auditable() +} + +type TeamPatch struct { + DisplayName *string `json:"display_name"` + Description *string `json:"description"` + CompanyName *string `json:"company_name"` + AllowedDomains *string `json:"allowed_domains"` + AllowOpenInvite *bool `json:"allow_open_invite"` + GroupConstrained *bool `json:"group_constrained"` + CloudLimitsArchived *bool `json:"cloud_limits_archived"` +} + +func (o *TeamPatch) Auditable() map[string]any { + return map[string]any{ + "allow_open_invite": o.AllowOpenInvite, + "group_constrained": o.GroupConstrained, + "cloud_limits_archived": o.CloudLimitsArchived, + } +} + +type TeamForExport struct { + Team + SchemeName *string +} + +type Invites struct { + Invites []map[string]string `json:"invites"` +} + +type TeamsWithCount struct { + Teams []*Team `json:"teams"` + TotalCount int64 `json:"total_count"` +} + +func (o *Invites) ToEmailList() []string { + emailList := make([]string, 0, len(o.Invites)) + for _, invite := range o.Invites { + emailList = append(emailList, invite["email"]) + } + return emailList +} + +func (o *Team) Etag() string { + return Etag(o.Id, o.UpdateAt) +} + +func (o *Team) IsValid() *AppError { + if !IsValidId(o.Id) { + return NewAppError("Team.IsValid", "model.team.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if o.CreateAt == 0 { + return NewAppError("Team.IsValid", "model.team.is_valid.create_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.UpdateAt == 0 { + return NewAppError("Team.IsValid", "model.team.is_valid.update_at.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if len(o.Email) > TeamEmailMaxLength { + return NewAppError("Team.IsValid", "model.team.is_valid.email.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.Email != "" && !IsValidEmail(o.Email) { + return NewAppError("Team.IsValid", "model.team.is_valid.email.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if utf8.RuneCountInString(o.DisplayName) == 0 || 
utf8.RuneCountInString(o.DisplayName) > TeamDisplayNameMaxRunes { + return NewAppError("Team.IsValid", "model.team.is_valid.name.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if len(o.Name) > TeamNameMaxLength { + return NewAppError("Team.IsValid", "model.team.is_valid.url.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if len(o.Description) > TeamDescriptionMaxLength { + return NewAppError("Team.IsValid", "model.team.is_valid.description.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if o.InviteId == "" { + return NewAppError("Team.IsValid", "model.team.is_valid.invite_id.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if IsReservedTeamName(o.Name) { + return NewAppError("Team.IsValid", "model.team.is_valid.reserved.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !IsValidTeamName(o.Name) { + return NewAppError("Team.IsValid", "model.team.is_valid.characters.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if !(o.Type == TeamOpen || o.Type == TeamInvite) { + return NewAppError("Team.IsValid", "model.team.is_valid.type.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if len(o.CompanyName) > TeamCompanyNameMaxLength { + return NewAppError("Team.IsValid", "model.team.is_valid.company.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + if len(o.AllowedDomains) > TeamAllowedDomainsMaxLength { + return NewAppError("Team.IsValid", "model.team.is_valid.domains.app_error", nil, "id="+o.Id, http.StatusBadRequest) + } + + return nil +} + +func (o *Team) PreSave() { + if o.Id == "" { + o.Id = NewId() + } + + o.CreateAt = GetMillis() + o.UpdateAt = o.CreateAt + + o.Name = SanitizeUnicode(o.Name) + o.DisplayName = SanitizeUnicode(o.DisplayName) + o.Description = SanitizeUnicode(o.Description) + o.CompanyName = SanitizeUnicode(o.CompanyName) + + if o.InviteId == "" { + o.InviteId = NewId() + } +} + +func (o *Team) PreUpdate() { + o.UpdateAt = GetMillis() + o.Name = 
SanitizeUnicode(o.Name) + o.DisplayName = SanitizeUnicode(o.DisplayName) + o.Description = SanitizeUnicode(o.Description) + o.CompanyName = SanitizeUnicode(o.CompanyName) +} + +func IsReservedTeamName(s string) bool { + s = strings.ToLower(s) + + for _, value := range reservedName { + if strings.Index(s, value) == 0 { + return true + } + } + + return false +} + +func IsValidTeamName(s string) bool { + if !isValidAlphaNum(s) { + return false + } + + if len(s) < TeamNameMinLength { + return false + } + + return true +} + +var validTeamNameCharacter = regexp.MustCompile(`^[a-z0-9-]$`) + +func CleanTeamName(s string) string { + s = strings.ToLower(strings.Replace(s, " ", "-", -1)) + + for _, value := range reservedName { + if strings.Index(s, value) == 0 { + s = strings.Replace(s, value, "", -1) + } + } + + s = strings.TrimSpace(s) + + for _, c := range s { + char := fmt.Sprintf("%c", c) + if !validTeamNameCharacter.MatchString(char) { + s = strings.Replace(s, char, "", -1) + } + } + + s = strings.Trim(s, "-") + + if !IsValidTeamName(s) { + s = NewId() + } + + return s +} + +func (o *Team) Sanitize() { + o.Email = "" + o.InviteId = "" +} + +func (o *Team) Patch(patch *TeamPatch) { + if patch.DisplayName != nil { + o.DisplayName = *patch.DisplayName + } + + if patch.Description != nil { + o.Description = *patch.Description + } + + if patch.CompanyName != nil { + o.CompanyName = *patch.CompanyName + } + + if patch.AllowedDomains != nil { + o.AllowedDomains = *patch.AllowedDomains + } + + if patch.AllowOpenInvite != nil { + o.AllowOpenInvite = *patch.AllowOpenInvite + } + + if patch.GroupConstrained != nil { + o.GroupConstrained = patch.GroupConstrained + } + + if patch.CloudLimitsArchived != nil { + o.CloudLimitsArchived = *patch.CloudLimitsArchived + } +} + +func (o *Team) IsGroupConstrained() bool { + return o.GroupConstrained != nil && *o.GroupConstrained +} + +// ShallowCopy returns a shallow copy of team. 
+func (o *Team) ShallowCopy() *Team { + c := *o + return &c +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/team_member.go b/vendor/github.com/mattermost/mattermost/server/public/model/team_member.go new file mode 100644 index 00000000..53cf25f0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/team_member.go @@ -0,0 +1,144 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "net/http" + "strings" +) + +const ( + USERNAME = "Username" +) + +// This struct's serializer methods are auto-generated. If a new field is added/removed, +// please run make gen-serialized. +// +//msgp:tuple TeamMember +type TeamMember struct { + TeamId string `json:"team_id"` + UserId string `json:"user_id"` + Roles string `json:"roles"` + DeleteAt int64 `json:"delete_at"` + SchemeGuest bool `json:"scheme_guest"` + SchemeUser bool `json:"scheme_user"` + SchemeAdmin bool `json:"scheme_admin"` + ExplicitRoles string `json:"explicit_roles"` + CreateAt int64 `json:"-"` +} + +func (o *TeamMember) Auditable() map[string]any { + return map[string]any{ + "team_id": o.TeamId, + "user_id": o.UserId, + "roles": o.Roles, + "delete_at": o.DeleteAt, + "scheme_guest": o.SchemeGuest, + "scheme_user": o.SchemeUser, + "scheme_admin": o.SchemeAdmin, + "explicit_roles": o.ExplicitRoles, + "create_at": o.CreateAt, + } +} + +//msgp:ignore TeamUnread +type TeamUnread struct { + TeamId string `json:"team_id"` + MsgCount int64 `json:"msg_count"` + MentionCount int64 `json:"mention_count"` + MentionCountRoot int64 `json:"mention_count_root"` + MsgCountRoot int64 `json:"msg_count_root"` + ThreadCount int64 `json:"thread_count"` + ThreadMentionCount int64 `json:"thread_mention_count"` + ThreadUrgentMentionCount int64 `json:"thread_urgent_mention_count"` +} + +//msgp:ignore TeamMemberForExport +type TeamMemberForExport struct { + TeamMember + TeamName string +} 
+ +//msgp:ignore TeamMemberWithError +type TeamMemberWithError struct { + UserId string `json:"user_id"` + Member *TeamMember `json:"member"` + Error *AppError `json:"error"` +} + +//msgp:ignore EmailInviteWithError +type EmailInviteWithError struct { + Email string `json:"email"` + Error *AppError `json:"error"` +} + +//msgp:ignore TeamMembersGetOptions +type TeamMembersGetOptions struct { + // Sort the team members. Accepts "Username", but defaults to "Id". + Sort string + + // If true, exclude team members whose corresponding user is deleted. + ExcludeDeletedUsers bool + + // Restrict to search in a list of teams and channels + ViewRestrictions *ViewUsersRestrictions +} + +//msgp:ignore TeamInviteReminderData +type TeamInviteReminderData struct { + Interval string +} + +func EmailInviteWithErrorToEmails(o []*EmailInviteWithError) []string { + var ret []string + for _, o := range o { + if o.Error == nil { + ret = append(ret, o.Email) + } + } + return ret +} + +func EmailInviteWithErrorToString(o *EmailInviteWithError) string { + return fmt.Sprintf("%s:%s", o.Email, o.Error.Error()) +} + +func TeamMembersWithErrorToTeamMembers(o []*TeamMemberWithError) []*TeamMember { + var ret []*TeamMember + for _, o := range o { + if o.Error == nil { + ret = append(ret, o.Member) + } + } + return ret +} + +func TeamMemberWithErrorToString(o *TeamMemberWithError) string { + return fmt.Sprintf("%s:%s", o.UserId, o.Error.Error()) +} + +func (o *TeamMember) IsValid() *AppError { + if !IsValidId(o.TeamId) { + return NewAppError("TeamMember.IsValid", "model.team_member.is_valid.team_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.UserId) { + return NewAppError("TeamMember.IsValid", "model.team_member.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if len(o.Roles) > UserRolesMaxLength { + return NewAppError("TeamMember.IsValid", "model.team_member.is_valid.roles_limit.app_error", + map[string]any{"Limit": UserRolesMaxLength}, "", 
http.StatusBadRequest) + } + + return nil +} + +func (o *TeamMember) PreUpdate() { +} + +func (o *TeamMember) GetRoles() []string { + return strings.Fields(o.Roles) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/team_member_serial_gen.go b/vendor/github.com/mattermost/mattermost/server/public/model/team_member_serial_gen.go new file mode 100644 index 00000000..9cf1d231 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/team_member_serial_gen.go @@ -0,0 +1,209 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. + +import ( + "github.com/tinylib/msgp/msgp" +) + +// DecodeMsg implements msgp.Decodable +func (z *TeamMember) DecodeMsg(dc *msgp.Reader) (err error) { + var zb0001 uint32 + zb0001, err = dc.ReadArrayHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + if zb0001 != 9 { + err = msgp.ArrayError{Wanted: 9, Got: zb0001} + return + } + z.TeamId, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "TeamId") + return + } + z.UserId, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "UserId") + return + } + z.Roles, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + z.DeleteAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "DeleteAt") + return + } + z.SchemeGuest, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "SchemeGuest") + return + } + z.SchemeUser, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "SchemeUser") + return + } + z.SchemeAdmin, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "SchemeAdmin") + return + } + z.ExplicitRoles, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "ExplicitRoles") + return + } + z.CreateAt, err = dc.ReadInt64() + if err != nil { + err = 
msgp.WrapError(err, "CreateAt") + return + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z *TeamMember) EncodeMsg(en *msgp.Writer) (err error) { + // array header, size 9 + err = en.Append(0x99) + if err != nil { + return + } + err = en.WriteString(z.TeamId) + if err != nil { + err = msgp.WrapError(err, "TeamId") + return + } + err = en.WriteString(z.UserId) + if err != nil { + err = msgp.WrapError(err, "UserId") + return + } + err = en.WriteString(z.Roles) + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + err = en.WriteInt64(z.DeleteAt) + if err != nil { + err = msgp.WrapError(err, "DeleteAt") + return + } + err = en.WriteBool(z.SchemeGuest) + if err != nil { + err = msgp.WrapError(err, "SchemeGuest") + return + } + err = en.WriteBool(z.SchemeUser) + if err != nil { + err = msgp.WrapError(err, "SchemeUser") + return + } + err = en.WriteBool(z.SchemeAdmin) + if err != nil { + err = msgp.WrapError(err, "SchemeAdmin") + return + } + err = en.WriteString(z.ExplicitRoles) + if err != nil { + err = msgp.WrapError(err, "ExplicitRoles") + return + } + err = en.WriteInt64(z.CreateAt) + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z *TeamMember) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // array header, size 9 + o = append(o, 0x99) + o = msgp.AppendString(o, z.TeamId) + o = msgp.AppendString(o, z.UserId) + o = msgp.AppendString(o, z.Roles) + o = msgp.AppendInt64(o, z.DeleteAt) + o = msgp.AppendBool(o, z.SchemeGuest) + o = msgp.AppendBool(o, z.SchemeUser) + o = msgp.AppendBool(o, z.SchemeAdmin) + o = msgp.AppendString(o, z.ExplicitRoles) + o = msgp.AppendInt64(o, z.CreateAt) + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *TeamMember) UnmarshalMsg(bts []byte) (o []byte, err error) { + var zb0001 uint32 + zb0001, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + err = 
msgp.WrapError(err) + return + } + if zb0001 != 9 { + err = msgp.ArrayError{Wanted: 9, Got: zb0001} + return + } + z.TeamId, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "TeamId") + return + } + z.UserId, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "UserId") + return + } + z.Roles, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + z.DeleteAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "DeleteAt") + return + } + z.SchemeGuest, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "SchemeGuest") + return + } + z.SchemeUser, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "SchemeUser") + return + } + z.SchemeAdmin, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "SchemeAdmin") + return + } + z.ExplicitRoles, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "ExplicitRoles") + return + } + z.CreateAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *TeamMember) Msgsize() (s int) { + s = 1 + msgp.StringPrefixSize + len(z.TeamId) + msgp.StringPrefixSize + len(z.UserId) + msgp.StringPrefixSize + len(z.Roles) + msgp.Int64Size + msgp.BoolSize + msgp.BoolSize + msgp.BoolSize + msgp.StringPrefixSize + len(z.ExplicitRoles) + msgp.Int64Size + return +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/team_search.go b/vendor/github.com/mattermost/mattermost/server/public/model/team_search.go new file mode 100644 index 00000000..c4a39275 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/team_search.go @@ -0,0 +1,22 @@ +// Copyright (c) 
2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type TeamSearch struct { + Term string `json:"term"` + Page *int `json:"page,omitempty"` + PerPage *int `json:"per_page,omitempty"` + AllowOpenInvite *bool `json:"allow_open_invite,omitempty"` + GroupConstrained *bool `json:"group_constrained,omitempty"` + IncludeGroupConstrained *bool `json:"include_group_constrained,omitempty"` + PolicyID *string `json:"policy_id,omitempty"` + ExcludePolicyConstrained *bool `json:"exclude_policy_constrained,omitempty"` + IncludePolicyID *bool `json:"-"` + IncludeDeleted *bool `json:"-"` + TeamType *string `json:"-"` +} + +func (t *TeamSearch) IsPaginated() bool { + return t.Page != nil && t.PerPage != nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/team_stats.go b/vendor/github.com/mattermost/mattermost/server/public/model/team_stats.go new file mode 100644 index 00000000..0a3a7387 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/team_stats.go @@ -0,0 +1,10 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type TeamStats struct { + TeamId string `json:"team_id"` + TotalMemberCount int64 `json:"total_member_count"` + ActiveMemberCount int64 `json:"active_member_count"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/terms_of_service.go b/vendor/github.com/mattermost/mattermost/server/public/model/terms_of_service.go new file mode 100644 index 00000000..26bc6a1d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/terms_of_service.go @@ -0,0 +1,54 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" + "unicode/utf8" +) + +type TermsOfService struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at"` + UserId string `json:"user_id"` + Text string `json:"text"` +} + +func (t *TermsOfService) IsValid() *AppError { + if !IsValidId(t.Id) { + return InvalidTermsOfServiceError("id", "") + } + + if t.CreateAt == 0 { + return InvalidTermsOfServiceError("create_at", t.Id) + } + + if !IsValidId(t.UserId) { + return InvalidTermsOfServiceError("user_id", t.Id) + } + + if utf8.RuneCountInString(t.Text) > PostMessageMaxRunesV2 { + return InvalidTermsOfServiceError("text", t.Id) + } + + return nil +} + +func InvalidTermsOfServiceError(fieldName string, termsOfServiceId string) *AppError { + id := fmt.Sprintf("model.terms_of_service.is_valid.%s.app_error", fieldName) + details := "" + if termsOfServiceId != "" { + details = "terms_of_service_id=" + termsOfServiceId + } + return NewAppError("TermsOfService.IsValid", id, map[string]any{"MaxLength": PostMessageMaxRunesV2}, details, http.StatusBadRequest) +} + +func (t *TermsOfService) PreSave() { + if t.Id == "" { + t.Id = NewId() + } + + t.CreateAt = GetMillis() +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/thread.go b/vendor/github.com/mattermost/mattermost/server/public/model/thread.go new file mode 100644 index 00000000..eccaefc5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/thread.go @@ -0,0 +1,150 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import "net/http" + +// Thread tracks the metadata associated with a root post and its reply posts. +// +// Note that Thread metadata does not exist until the first reply to a root post. +type Thread struct { + // PostId is the root post of the thread. + PostId string `json:"id"` + + // ChannelId is the channel in which the thread was posted. 
+ ChannelId string `json:"channel_id"` + + // ReplyCount is the number of replies to the thread (excluding deleted posts). + ReplyCount int64 `json:"reply_count"` + + // LastReplyAt is the timestamp of the most recent post to the thread. + LastReplyAt int64 `json:"last_reply_at"` + + // Participants is a list of user ids that have replied to the thread, sorted by the oldest + // to newest. Note that the root post author is not included in this list until they reply. + Participants StringArray `json:"participants"` + + // DeleteAt is a denormalized copy of the root posts's DeleteAt. In the database, it's + // named ThreadDeleteAt to avoid introducing a query conflict with older server versions. + DeleteAt int64 `json:"delete_at"` + + // TeamId is a denormalized copy of the Channel's teamId. In the database, it's + // named ThreadTeamId to avoid introducing a query conflict with older server versions. + TeamId string `json:"team_id"` +} + +type ThreadResponse struct { + PostId string `json:"id"` + ReplyCount int64 `json:"reply_count"` + LastReplyAt int64 `json:"last_reply_at"` + LastViewedAt int64 `json:"last_viewed_at"` + Participants []*User `json:"participants"` + Post *Post `json:"post"` + UnreadReplies int64 `json:"unread_replies"` + UnreadMentions int64 `json:"unread_mentions"` + IsUrgent bool `json:"is_urgent"` + DeleteAt int64 `json:"delete_at"` +} + +type Threads struct { + Total int64 `json:"total"` + TotalUnreadThreads int64 `json:"total_unread_threads"` + TotalUnreadMentions int64 `json:"total_unread_mentions"` + TotalUnreadUrgentMentions int64 `json:"total_unread_urgent_mentions"` + Threads []*ThreadResponse `json:"threads"` +} + +type GetUserThreadsOpts struct { + // PageSize specifies the size of the returned chunk of results. Default = 30 + PageSize uint64 + + // Extended will enrich the response with participant details. Default = false + Extended bool + + // Deleted will specify that even deleted threads should be returned (For mobile sync). 
Default = false + Deleted bool + + // Since filters the threads based on their LastUpdateAt timestamp. + Since uint64 + + // Before specifies thread id as a cursor for pagination and will return `PageSize` threads before the cursor + Before string + + // After specifies thread id as a cursor for pagination and will return `PageSize` threads after the cursor + After string + + // Unread will make sure that only threads with unread replies are returned + Unread bool + + // TotalsOnly will not fetch any threads and just fetch the total counts + TotalsOnly bool + + // ThreadsOnly will fetch threads but not calculate totals and will return 0 + ThreadsOnly bool + + // TeamOnly will only fetch threads and unreads for the specified team and excludes DMs/GMs + TeamOnly bool + + // IncludeIsUrgent will return IsUrgent field as well to assert is the thread is urgent or not + IncludeIsUrgent bool + + ExcludeDirect bool +} + +func (o *Thread) Etag() string { + return Etag(o.PostId, o.LastReplyAt) +} + +// ThreadMembership models the relationship between a user and a thread of posts, with a similar +// data structure as ChannelMembership. +type ThreadMembership struct { + // PostId is the root post id of the thread in question. + PostId string `json:"post_id"` + + // UserId is the user whose membership in the thread is being tracked. + UserId string `json:"user_id"` + + // Following tracks whether the user is following the given thread. This defaults to true + // when a ThreadMembership record is created (a record doesn't exist until the user first + // starts following the thread), but the user can stop following or resume following at + // will. + Following bool `json:"following"` + + // LastUpdated is either the creation time of the membership record, or the last time the + // membership record was changed (e.g. started/stopped following, viewed thread, mention + // count change). 
+ // + // This field is used to constrain queries of thread memberships to those updated after + // a given timestamp (e.g. on websocket reconnect). It's also used as the time column for + // deletion decisions during any configured retention policy. + LastUpdated int64 `json:"last_update_at"` + + // LastViewed is the last time the user viewed this thread. It is the thread analogue to + // the ChannelMembership's LastViewedAt and is used to decide when there are new replies + // for the user and where the user should start reading. + LastViewed int64 `json:"last_view_at"` + + // UnreadMentions is the number of unseen at-mentions for the user in the given thread. It + // is the thread analogue to the ChannelMembership's MentionCount, and is used to highlight + // threads with the mention count. + UnreadMentions int64 `json:"unread_mentions"` +} + +func (o *ThreadMembership) IsValid() *AppError { + if !IsValidId(o.PostId) { + return NewAppError("ThreadMembership.IsValid", "model.thread.is_valid.post_id.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(o.UserId) { + return NewAppError("ThreadMembership.IsValid", "model.thread.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +type ThreadMembershipForExport struct { + Username string `json:"user_name"` + LastViewed int64 `json:"last_viewed"` + UnreadMentions int64 `json:"unread_mentions"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/token.go b/vendor/github.com/mattermost/mattermost/server/public/model/token.go new file mode 100644 index 00000000..731f618e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/token.go @@ -0,0 +1,48 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "net/http" +) + +const ( + TokenSize = 64 + MaxTokenExipryTime = 1000 * 60 * 60 * 48 // 48 hour + TokenTypeOAuth = "oauth" + TokenTypeSaml = "saml" + TokenTypeSSOCodeExchange = "sso-code-exchange" +) + +type Token struct { + Token string + CreateAt int64 + Type string + Extra string +} + +func NewToken(tokentype, extra string) *Token { + return &Token{ + Token: NewRandomString(TokenSize), + CreateAt: GetMillis(), + Type: tokentype, + Extra: extra, + } +} + +func (t *Token) IsValid() *AppError { + if len(t.Token) != TokenSize { + return NewAppError("Token.IsValid", "model.token.is_valid.size", nil, "", http.StatusInternalServerError) + } + + if t.CreateAt == 0 { + return NewAppError("Token.IsValid", "model.token.is_valid.expiry", nil, "", http.StatusInternalServerError) + } + + return nil +} + +func (t *Token) IsExpired() bool { + return GetMillis() > (t.CreateAt + MaxTokenExipryTime) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/typing_request.go b/vendor/github.com/mattermost/mattermost/server/public/model/typing_request.go new file mode 100644 index 00000000..f7a34341 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/typing_request.go @@ -0,0 +1,9 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type TypingRequest struct { + ChannelId string `json:"channel_id"` + ParentId string `json:"parent_id"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/upload_session.go b/vendor/github.com/mattermost/mattermost/server/public/model/upload_session.go new file mode 100644 index 00000000..7bfe4248 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/upload_session.go @@ -0,0 +1,127 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" +) + +// UploadType defines the type of an upload. +type UploadType string + +const ( + UploadTypeAttachment UploadType = "attachment" + UploadTypeImport UploadType = "import" + IncompleteUploadSuffix = ".tmp" +) + +// UploadNoUserID is a "fake" user id used by the API layer when in local mode. +const UploadNoUserID = "nouser" + +// UploadSession contains information used to keep track of a file upload. +type UploadSession struct { + // The unique identifier for the session. + Id string `json:"id"` + // The type of the upload. + Type UploadType `json:"type"` + // The timestamp of creation. + CreateAt int64 `json:"create_at"` + // The id of the user performing the upload. + UserId string `json:"user_id"` + // The id of the channel to upload to. + ChannelId string `json:"channel_id,omitempty"` + // The name of the file to upload. + Filename string `json:"filename"` + // The path where the file is stored. + Path string `json:"-"` + // The size of the file to upload. + FileSize int64 `json:"file_size"` + // The amount of received data in bytes. If equal to FileSize it means the + // upload has finished. + FileOffset int64 `json:"file_offset"` + // Id of remote cluster if uploading for shared channel + RemoteId string `json:"remote_id"` + // Requested file id if uploading for shared channel + ReqFileId string `json:"req_file_id"` +} + +func (us *UploadSession) Auditable() map[string]any { + return map[string]any{ + "id": us.Id, + "type": us.Type, + "user_id": us.UserId, + "channel_id": us.ChannelId, + "filename": us.Filename, + "file_size": us.FileSize, + "remote_id": us.RemoteId, + "ReqFileId": us.ReqFileId, + } +} + +// PreSave is a utility function used to fill required information. +func (us *UploadSession) PreSave() { + if us.Id == "" { + us.Id = NewId() + } + + if us.CreateAt == 0 { + us.CreateAt = GetMillis() + } +} + +// IsValid validates an UploadType. It returns an error in case of +// failure. 
+func (t UploadType) IsValid() error { + switch t { + case UploadTypeAttachment: + return nil + case UploadTypeImport: + return nil + default: + } + return fmt.Errorf("invalid UploadType %s", t) +} + +// IsValid validates an UploadSession. It returns an error in case of +// failure. +func (us *UploadSession) IsValid() *AppError { + if !IsValidId(us.Id) { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if err := us.Type.IsValid(); err != nil { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.type.app_error", nil, "", http.StatusBadRequest).Wrap(err) + } + + if !IsValidId(us.UserId) && us.UserId != UploadNoUserID { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.user_id.app_error", nil, "id="+us.Id, http.StatusBadRequest) + } + + if us.Type == UploadTypeAttachment && !IsValidId(us.ChannelId) { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.channel_id.app_error", nil, "id="+us.Id, http.StatusBadRequest) + } + + if us.CreateAt == 0 { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.create_at.app_error", nil, "id="+us.Id, http.StatusBadRequest) + } + + if us.Filename == "" { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.filename.app_error", nil, "id="+us.Id, http.StatusBadRequest) + } + + if us.FileSize <= 0 { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.file_size.app_error", nil, "id="+us.Id, http.StatusBadRequest) + } + + if us.FileOffset < 0 || us.FileOffset > us.FileSize { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.file_offset.app_error", nil, "id="+us.Id, http.StatusBadRequest) + } + + if us.Path == "" { + return NewAppError("UploadSession.IsValid", "model.upload_session.is_valid.path.app_error", nil, "id="+us.Id, http.StatusBadRequest) + } + + return nil +} diff --git 
a/vendor/github.com/mattermost/mattermost/server/public/model/usage.go b/vendor/github.com/mattermost/mattermost/server/public/model/usage.go new file mode 100644 index 00000000..1845f2d0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/usage.go @@ -0,0 +1,35 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type PostsUsage struct { + Count int64 `json:"count"` +} + +type StorageUsage struct { + Bytes int64 `json:"bytes"` +} + +type TeamsUsage struct { + Active int64 `json:"active"` + CloudArchived int64 `json:"cloud_archived"` +} + +var InstalledIntegrationsIgnoredPlugins = map[string]struct{}{ + PluginIdPlaybooks: {}, + PluginIdFocalboard: {}, + PluginIdApps: {}, + PluginIdCalls: {}, + PluginIdNPS: {}, + PluginIdChannelExport: {}, + PluginIdAI: {}, +} + +type InstalledIntegration struct { + Type string `json:"type"` // "plugin" or "app" + ID string `json:"id"` + Name string `json:"name"` + Version string `json:"version"` + Enabled bool `json:"enabled"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user.go b/vendor/github.com/mattermost/mattermost/server/public/model/user.go new file mode 100644 index 00000000..3e2ec00e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user.go @@ -0,0 +1,1111 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "crypto/sha256" + "encoding/json" + "fmt" + "net/http" + "regexp" + "slices" + "sort" + "strings" + "time" + "unicode/utf8" + + "github.com/pkg/errors" + + "golang.org/x/text/language" + + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/mattermost/mattermost/server/public/shared/timezones" + "github.com/mattermost/mattermost/server/v8/channels/app/password/hashers" +) + +const ( + Me = "me" + UserNotifyAll = "all" + UserNotifyHere = "here" + UserNotifyMention = "mention" + UserNotifyNone = "none" + DesktopNotifyProp = "desktop" + DesktopSoundNotifyProp = "desktop_sound" + MarkUnreadNotifyProp = "mark_unread" + PushNotifyProp = "push" + PushStatusNotifyProp = "push_status" + EmailNotifyProp = "email" + ChannelMentionsNotifyProp = "channel" + CommentsNotifyProp = "comments" + MentionKeysNotifyProp = "mention_keys" + HighlightsNotifyProp = "highlight_keys" + CommentsNotifyNever = "never" + CommentsNotifyRoot = "root" + CommentsNotifyAny = "any" + CommentsNotifyCRT = "crt" + FirstNameNotifyProp = "first_name" + AutoResponderActiveNotifyProp = "auto_responder_active" + AutoResponderMessageNotifyProp = "auto_responder_message" + DesktopThreadsNotifyProp = "desktop_threads" + PushThreadsNotifyProp = "push_threads" + EmailThreadsNotifyProp = "email_threads" + + DefaultLocale = "en" + UserAuthServiceEmail = "email" + + UserEmailMaxLength = 128 + UserNicknameMaxRunes = 64 + UserPositionMaxRunes = 128 + UserFirstNameMaxRunes = 64 + UserLastNameMaxRunes = 64 + UserAuthDataMaxLength = 128 + UserNameMaxLength = 64 + UserNameMinLength = 1 + UserPasswordMaxLength = 72 + UserLocaleMaxLength = 5 + UserTimezoneMaxRunes = 256 + UserRolesMaxLength = 256 + + DesktopTokenTTL = time.Minute * 3 +) + +//msgp:tuple User + +// User contains the details about the user. +// This struct's serializer methods are auto-generated. If a new field is added/removed, +// please run make gen-serialized. 
+type User struct { + Id string `json:"id"` + CreateAt int64 `json:"create_at,omitempty"` + UpdateAt int64 `json:"update_at,omitempty"` + DeleteAt int64 `json:"delete_at"` + Username string `json:"username"` + Password string `json:"password,omitempty"` + AuthData *string `json:"auth_data,omitempty"` + AuthService string `json:"auth_service"` + Email string `json:"email"` + EmailVerified bool `json:"email_verified,omitempty"` + Nickname string `json:"nickname"` + FirstName string `json:"first_name"` + LastName string `json:"last_name"` + Position string `json:"position"` + Roles string `json:"roles"` + AllowMarketing bool `json:"allow_marketing,omitempty"` + Props StringMap `json:"props,omitempty"` + NotifyProps StringMap `json:"notify_props,omitempty"` + LastPasswordUpdate int64 `json:"last_password_update,omitempty"` + LastPictureUpdate int64 `json:"last_picture_update,omitempty"` + FailedAttempts int `json:"failed_attempts,omitempty"` + Locale string `json:"locale"` + Timezone StringMap `json:"timezone"` + MfaActive bool `json:"mfa_active,omitempty"` + MfaSecret string `json:"mfa_secret,omitempty"` + RemoteId *string `json:"remote_id,omitempty"` + LastActivityAt int64 `json:"last_activity_at,omitempty"` + IsBot bool `json:"is_bot,omitempty"` + BotDescription string `json:"bot_description,omitempty"` + BotLastIconUpdate int64 `json:"bot_last_icon_update,omitempty"` + TermsOfServiceId string `json:"terms_of_service_id,omitempty"` + TermsOfServiceCreateAt int64 `json:"terms_of_service_create_at,omitempty"` + DisableWelcomeEmail bool `json:"disable_welcome_email"` + LastLogin int64 `json:"last_login,omitempty"` + MfaUsedTimestamps StringArray `json:"mfa_used_timestamps,omitempty"` +} + +func (u *User) Auditable() map[string]any { + return map[string]any{ + "id": u.Id, + "create_at": u.CreateAt, + "update_at": u.UpdateAt, + "delete_at": u.DeleteAt, + "username": u.Username, + "auth_service": u.AuthService, + "email": u.Email, + "email_verified": u.EmailVerified, + 
"position": u.Position, + "roles": u.Roles, + "allow_marketing": u.AllowMarketing, + "props": u.Props, + "notify_props": u.NotifyProps, + "last_password_update": u.LastPasswordUpdate, + "last_picture_update": u.LastPictureUpdate, + "failed_attempts": u.FailedAttempts, + "locale": u.Locale, + "timezone": u.Timezone, + "mfa_active": u.MfaActive, + "remote_id": u.GetRemoteID(), + "last_activity_at": u.LastActivityAt, + "is_bot": u.IsBot, + "bot_description": u.BotDescription, + "bot_last_icon_update": u.BotLastIconUpdate, + "terms_of_service_id": u.TermsOfServiceId, + "terms_of_service_create_at": u.TermsOfServiceCreateAt, + "disable_welcome_email": u.DisableWelcomeEmail, + } +} + +func (u *User) LogClone() any { + return map[string]any{ + "id": u.Id, + "create_at": u.CreateAt, + "update_at": u.UpdateAt, + "delete_at": u.DeleteAt, + "username": u.Username, + "auth_data": u.GetAuthData(), + "auth_service": u.AuthService, + "email": u.Email, + "email_verified": u.EmailVerified, + "position": u.Position, + "roles": u.Roles, + "allow_marketing": u.AllowMarketing, + "props": u.Props, + "notify_props": u.NotifyProps, + "locale": u.Locale, + "timezone": u.Timezone, + "mfa_active": u.MfaActive, + "remote_id": u.GetRemoteID(), + } +} + +//msgp UserMap + +// UserMap is a map from a userId to a user object. +// It is used to generate methods which can be used for fast serialization/de-serialization. 
+type UserMap map[string]*User + +//msgp:ignore UserUpdate +type UserUpdate struct { + Old *User + New *User +} + +//msgp:ignore UserPatch +type UserPatch struct { + Username *string `json:"username"` + Password *string `json:"password,omitempty"` + Nickname *string `json:"nickname"` + FirstName *string `json:"first_name"` + LastName *string `json:"last_name"` + Position *string `json:"position"` + Email *string `json:"email"` + Props StringMap `json:"props,omitempty"` + NotifyProps StringMap `json:"notify_props,omitempty"` + Locale *string `json:"locale"` + Timezone StringMap `json:"timezone"` + RemoteId *string `json:"remote_id"` +} + +func (u *UserPatch) Auditable() map[string]any { + return map[string]any{ + "username": u.Username, + "nickname": u.Nickname, + "first_name": u.FirstName, + "last_name": u.LastName, + "position": u.Position, + "email": u.Email, + "props": u.Props, + "notify_props": u.NotifyProps, + "locale": u.Locale, + "timezone": u.Timezone, + "remote_id": u.RemoteId, + } +} + +//msgp:ignore UserAuth +type UserAuth struct { + AuthData *string `json:"auth_data,omitempty"` + AuthService string `json:"auth_service,omitempty"` +} + +func (u *UserAuth) Auditable() map[string]any { + return map[string]any{ + "auth_service": u.AuthService, + } +} + +//msgp:ignore UserForIndexing +type UserForIndexing struct { + Id string `json:"id"` + Username string `json:"username"` + Nickname string `json:"nickname"` + FirstName string `json:"first_name"` + LastName string `json:"last_name"` + Roles string `json:"roles"` + CreateAt int64 `json:"create_at"` + DeleteAt int64 `json:"delete_at"` + TeamsIds []string `json:"team_id"` + ChannelsIds []string `json:"channel_id"` +} + +//msgp:ignore ViewUsersRestrictions +type ViewUsersRestrictions struct { + Teams []string + Channels []string +} + +//msgp:ignore GetUsersNotInChannelOptions +type GetUsersNotInChannelOptions struct { + TeamID string `json:"team_id"` + // Page-based pagination (used for non-ABAC channels) + // 
This will be discarded if the channel has an ABAC policy and CursorID will be used. + Page int `json:"page"` + Limit int `json:"limit"` + // Cursor-based pagination (used for ABAC channels) + // If CursorID is empty for ABAC channels, it will start from the beginning + CursorID string `json:"cursor_id"` + Etag string `json:"etag"` +} + +func (r *ViewUsersRestrictions) Hash() string { + if r == nil { + return "" + } + ids := append(r.Teams, r.Channels...) + sort.Strings(ids) + hash := sha256.New() + hash.Write([]byte(strings.Join(ids, ""))) + return fmt.Sprintf("%x", hash.Sum(nil)) +} + +//msgp:ignore UserSlice +type UserSlice []*User + +func (u UserSlice) Usernames() []string { + usernames := []string{} + for _, user := range u { + usernames = append(usernames, user.Username) + } + sort.Strings(usernames) + return usernames +} + +func (u UserSlice) IDs() []string { + ids := []string{} + for _, user := range u { + ids = append(ids, user.Id) + } + return ids +} + +func (u UserSlice) FilterWithoutBots() UserSlice { + var matches []*User + + for _, user := range u { + if !user.IsBot { + matches = append(matches, user) + } + } + return UserSlice(matches) +} + +func (u UserSlice) FilterByActive(active bool) UserSlice { + var matches []*User + + for _, user := range u { + if user.DeleteAt == 0 && active { + matches = append(matches, user) + } else if user.DeleteAt != 0 && !active { + matches = append(matches, user) + } + } + return UserSlice(matches) +} + +func (u UserSlice) FilterByID(ids []string) UserSlice { + var matches []*User + for _, user := range u { + for _, id := range ids { + if id == user.Id { + matches = append(matches, user) + } + } + } + return UserSlice(matches) +} + +func (u UserSlice) FilterWithoutID(ids []string) UserSlice { + var keep []*User + for _, user := range u { + present := false + for _, id := range ids { + if id == user.Id { + present = true + } + } + if !present { + keep = append(keep, user) + } + } + return UserSlice(keep) +} + +func (u 
*User) DeepCopy() *User { + copyUser := *u + if u.AuthData != nil { + copyUser.AuthData = NewPointer(*u.AuthData) + } + if u.Props != nil { + copyUser.Props = CopyStringMap(u.Props) + } + if u.NotifyProps != nil { + copyUser.NotifyProps = CopyStringMap(u.NotifyProps) + } + if u.Timezone != nil { + copyUser.Timezone = CopyStringMap(u.Timezone) + } + return &copyUser +} + +// IsValid validates the user and returns an error if it isn't configured +// correctly. +func (u *User) IsValid() *AppError { + if !IsValidId(u.Id) { + return InvalidUserError("id", "", u.Id) + } + + if u.CreateAt == 0 { + return InvalidUserError("create_at", u.Id, u.CreateAt) + } + + if u.UpdateAt == 0 { + return InvalidUserError("update_at", u.Id, u.UpdateAt) + } + + if u.IsRemote() { + if !IsValidUsernameAllowRemote(u.Username) { + return InvalidUserError("username", u.Id, u.Username) + } + } else { + if !IsValidUsername(u.Username) { + return InvalidUserError("username", u.Id, u.Username) + } + } + + if len(u.Email) > UserEmailMaxLength || u.Email == "" || (!IsValidEmail(u.Email) && !u.IsRemote()) { + return InvalidUserError("email", u.Id, u.Email) + } + + if utf8.RuneCountInString(u.Nickname) > UserNicknameMaxRunes { + return InvalidUserError("nickname", u.Id, u.Nickname) + } + + if utf8.RuneCountInString(u.Position) > UserPositionMaxRunes { + return InvalidUserError("position", u.Id, u.Position) + } + + if utf8.RuneCountInString(u.FirstName) > UserFirstNameMaxRunes { + return InvalidUserError("first_name", u.Id, u.FirstName) + } + + if utf8.RuneCountInString(u.LastName) > UserLastNameMaxRunes { + return InvalidUserError("last_name", u.Id, u.LastName) + } + + if u.AuthData != nil && len(*u.AuthData) > UserAuthDataMaxLength { + return InvalidUserError("auth_data", u.Id, u.AuthData) + } + + if u.AuthData != nil && *u.AuthData != "" && u.AuthService == "" { + return InvalidUserError("auth_data_type", u.Id, *u.AuthData+" "+u.AuthService) + } + + if u.Password != "" && u.AuthData != nil && *u.AuthData
!= "" { + return InvalidUserError("auth_data_pwd", u.Id, *u.AuthData) + } + + if !IsValidLocale(u.Locale) { + return InvalidUserError("locale", u.Id, u.Locale) + } + + if len(u.Timezone) > 0 { + if tzJSON, err := json.Marshal(u.Timezone); err != nil { + return NewAppError("User.IsValid", "model.user.is_valid.marshal.app_error", nil, "", http.StatusInternalServerError).Wrap(err) + } else if utf8.RuneCount(tzJSON) > UserTimezoneMaxRunes { + return InvalidUserError("timezone_limit", u.Id, u.Timezone) + } + } + + if len(u.Roles) > UserRolesMaxLength { + return NewAppError("User.IsValid", "model.user.is_valid.roles_limit.app_error", + map[string]any{"Limit": UserRolesMaxLength}, "user_id="+u.Id+" roles_limit="+u.Roles, http.StatusBadRequest) + } + + if u.Props != nil { + if !u.ValidateCustomStatus() { + return NewAppError("User.IsValid", "model.user.is_valid.invalidProperty.app_error", + map[string]any{"Props": u.Props}, "user_id="+u.Id, http.StatusBadRequest) + } + } + + return nil +} + +func InvalidUserError(fieldName, userId string, fieldValue any) *AppError { + id := fmt.Sprintf("model.user.is_valid.%s.app_error", fieldName) + details := "" + if userId != "" { + details = "user_id=" + userId + } + details += fmt.Sprintf(" %s=%v", fieldName, fieldValue) + return NewAppError("User.IsValid", id, nil, details, http.StatusBadRequest) +} + +func NormalizeUsername(username string) string { + return strings.ToLower(username) +} + +func NormalizeEmail(email string) string { + return strings.ToLower(email) +} + +// PreSave will set the Id and Username if missing. It will also fill +// in the CreateAt, UpdateAt times. It will also hash the password. It should +// be run before saving the user to the db. 
+func (u *User) PreSave() *AppError { + if u.Id == "" { + u.Id = NewId() + } + + if u.Username == "" { + u.Username = NewUsername() + } + + if u.AuthData != nil && *u.AuthData == "" { + u.AuthData = nil + } + + u.Username = SanitizeUnicode(u.Username) + u.FirstName = SanitizeUnicode(u.FirstName) + u.LastName = SanitizeUnicode(u.LastName) + u.Nickname = SanitizeUnicode(u.Nickname) + + u.Username = NormalizeUsername(u.Username) + u.Email = NormalizeEmail(u.Email) + + if u.CreateAt == 0 { + u.CreateAt = GetMillis() + } + u.UpdateAt = u.CreateAt + + u.LastPasswordUpdate = u.CreateAt + + u.MfaActive = false + + if u.Locale == "" { + u.Locale = DefaultLocale + } + + if u.Props == nil { + u.Props = make(map[string]string) + } + + if len(u.NotifyProps) == 0 { + u.SetDefaultNotifications() + } + + if u.Timezone == nil { + u.Timezone = timezones.DefaultUserTimezone() + } + + if u.Password != "" { + hashed, err := hashers.Hash(u.Password) + if errors.Is(err, hashers.ErrPasswordTooLong) { + return NewAppError("User.PreSave", "model.user.pre_save.password_too_long.app_error", + nil, "user_id="+u.Id, http.StatusBadRequest).Wrap(err) + } else if err != nil { + return NewAppError("User.PreSave", "model.user.pre_save.password_hash.app_error", + nil, "user_id="+u.Id, http.StatusBadRequest).Wrap(err) + } + u.Password = hashed + } + + cs := u.GetCustomStatus() + if cs != nil { + cs.PreSave() + u.SetCustomStatus(cs) + } + + return nil +} + +// PreUpdate should be run before updating the user in the db. 
+func (u *User) PreUpdate() { + u.Username = SanitizeUnicode(u.Username) + u.FirstName = SanitizeUnicode(u.FirstName) + u.LastName = SanitizeUnicode(u.LastName) + u.Nickname = SanitizeUnicode(u.Nickname) + u.BotDescription = SanitizeUnicode(u.BotDescription) + + u.Username = NormalizeUsername(u.Username) + u.Email = NormalizeEmail(u.Email) + u.UpdateAt = GetMillis() + + u.FirstName = SanitizeUnicode(u.FirstName) + u.LastName = SanitizeUnicode(u.LastName) + u.Nickname = SanitizeUnicode(u.Nickname) + u.BotDescription = SanitizeUnicode(u.BotDescription) + + if u.AuthData != nil && *u.AuthData == "" { + u.AuthData = nil + } + + if len(u.NotifyProps) == 0 { + u.SetDefaultNotifications() + } else if _, ok := u.NotifyProps[MentionKeysNotifyProp]; ok { + // Remove any blank mention keys + splitKeys := strings.Split(u.NotifyProps[MentionKeysNotifyProp], ",") + goodKeys := []string{} + for _, key := range splitKeys { + if key != "" { + goodKeys = append(goodKeys, strings.ToLower(key)) + } + } + u.NotifyProps[MentionKeysNotifyProp] = strings.Join(goodKeys, ",") + } + + if u.Props != nil { + cs := u.GetCustomStatus() + if cs != nil { + cs.PreSave() + u.SetCustomStatus(cs) + } + } +} + +func (u *User) SetDefaultNotifications() { + u.NotifyProps = make(map[string]string) + u.NotifyProps[EmailNotifyProp] = "true" + u.NotifyProps[PushNotifyProp] = UserNotifyMention + u.NotifyProps[DesktopNotifyProp] = UserNotifyMention + u.NotifyProps[DesktopSoundNotifyProp] = "true" + u.NotifyProps[MentionKeysNotifyProp] = "" + u.NotifyProps[ChannelMentionsNotifyProp] = "true" + u.NotifyProps[PushStatusNotifyProp] = StatusOnline + u.NotifyProps[CommentsNotifyProp] = CommentsNotifyNever + u.NotifyProps[FirstNameNotifyProp] = "false" + u.NotifyProps[DesktopThreadsNotifyProp] = UserNotifyAll + u.NotifyProps[EmailThreadsNotifyProp] = UserNotifyAll + u.NotifyProps[PushThreadsNotifyProp] = UserNotifyAll +} + +func (u *User) UpdateMentionKeysFromUsername(oldUsername string) { + nonUsernameKeys := 
[]string{} + for _, key := range u.GetMentionKeys() { + if key != oldUsername && key != "@"+oldUsername { + nonUsernameKeys = append(nonUsernameKeys, key) + } + } + + u.NotifyProps[MentionKeysNotifyProp] = "" + if len(nonUsernameKeys) > 0 { + u.NotifyProps[MentionKeysNotifyProp] += "," + strings.Join(nonUsernameKeys, ",") + } +} + +func (u *User) GetMentionKeys() []string { + var keys []string + + for key := range strings.SplitSeq(u.NotifyProps[MentionKeysNotifyProp], ",") { + trimmedKey := strings.TrimSpace(key) + + if trimmedKey == "" { + continue + } + + keys = append(keys, trimmedKey) + } + + return keys +} + +func (u *User) Patch(patch *UserPatch) { + if patch.Username != nil { + u.Username = *patch.Username + } + + if patch.Nickname != nil { + u.Nickname = *patch.Nickname + } + + if patch.FirstName != nil { + u.FirstName = *patch.FirstName + } + + if patch.LastName != nil { + u.LastName = *patch.LastName + } + + if patch.Position != nil { + u.Position = *patch.Position + } + + if patch.Email != nil { + u.Email = *patch.Email + } + + if patch.Props != nil { + u.Props = patch.Props + } + + if patch.NotifyProps != nil { + u.NotifyProps = patch.NotifyProps + } + + if patch.Locale != nil { + u.Locale = *patch.Locale + } + + if patch.Timezone != nil { + u.Timezone = patch.Timezone + } + + if patch.RemoteId != nil { + u.RemoteId = patch.RemoteId + } +} + +// Generate a valid strong etag so the browser can cache the results +func (u *User) Etag(showFullName, showEmail bool) string { + return Etag(u.Id, u.UpdateAt, u.TermsOfServiceId, u.TermsOfServiceCreateAt, showFullName, showEmail, u.BotLastIconUpdate) +} + +// Remove any private data from the user object +func (u *User) Sanitize(options map[string]bool) { + u.Password = "" + u.MfaSecret = "" + u.MfaUsedTimestamps = nil + u.LastLogin = 0 + + if len(options) != 0 { + if !options["email"] { + u.Email = "" + delete(u.Props, UserPropsKeyRemoteEmail) + } + if !options["fullname"] { + u.FirstName = "" + u.LastName = "" + 
} + if !options["passwordupdate"] { + u.LastPasswordUpdate = 0 + } + if !options["authservice"] { + u.AuthService = "" + } + if !options["authdata"] { + u.AuthData = NewPointer("") + } + } +} + +// Remove any input data from the user object that is not user controlled +func (u *User) SanitizeInput(isAdmin bool) { + if !isAdmin { + u.AuthData = NewPointer("") + u.AuthService = "" + u.EmailVerified = false + } + u.RemoteId = NewPointer("") + u.CreateAt = 0 + u.UpdateAt = 0 + u.DeleteAt = 0 + u.LastPasswordUpdate = 0 + u.LastPictureUpdate = 0 + u.FailedAttempts = 0 + u.MfaActive = false + u.MfaSecret = "" + u.MfaUsedTimestamps = StringArray{} + u.Email = strings.TrimSpace(u.Email) + u.LastActivityAt = 0 +} + +func (u *User) ClearNonProfileFields(asAdmin bool) { + u.Password = "" + u.MfaSecret = "" + u.MfaUsedTimestamps = nil + u.EmailVerified = false + u.AllowMarketing = false + u.LastPasswordUpdate = 0 + + if !asAdmin { + u.AuthData = NewPointer("") + u.NotifyProps = StringMap{} + u.FailedAttempts = 0 + } +} + +func (u *User) SanitizeProfile(options map[string]bool, asAdmin bool) { + u.ClearNonProfileFields(asAdmin) + + u.Sanitize(options) +} + +func (u *User) MakeNonNil() { + if u.Props == nil { + u.Props = make(map[string]string) + } + + if u.NotifyProps == nil { + u.NotifyProps = make(map[string]string) + } +} + +func (u *User) AddNotifyProp(key string, value string) { + u.MakeNonNil() + + u.NotifyProps[key] = value +} + +func (u *User) SetCustomStatus(cs *CustomStatus) error { + u.MakeNonNil() + statusJSON, jsonErr := json.Marshal(cs) + if jsonErr != nil { + return jsonErr + } + u.Props[UserPropsKeyCustomStatus] = string(statusJSON) + return nil +} + +func (u *User) GetCustomStatus() *CustomStatus { + var o *CustomStatus + + data := u.Props[UserPropsKeyCustomStatus] + _ = json.Unmarshal([]byte(data), &o) + + return o +} + +func (u *User) CustomStatus() *CustomStatus { + var o *CustomStatus + + data := u.Props[UserPropsKeyCustomStatus] + _ = 
json.Unmarshal([]byte(data), &o) + + return o +} + +func (u *User) ClearCustomStatus() { + u.MakeNonNil() + u.Props[UserPropsKeyCustomStatus] = "" +} + +func (u *User) ValidateCustomStatus() bool { + status, exists := u.Props[UserPropsKeyCustomStatus] + if exists && status != "" { + cs := u.GetCustomStatus() + if cs == nil { + return false + } + } + return true +} + +func (u *User) GetFullName() string { + if u.FirstName != "" && u.LastName != "" { + return u.FirstName + " " + u.LastName + } else if u.FirstName != "" { + return u.FirstName + } else if u.LastName != "" { + return u.LastName + } + return "" +} + +func (u *User) getDisplayName(baseName, nameFormat string) string { + displayName := baseName + + if nameFormat == ShowNicknameFullName { + if u.Nickname != "" { + displayName = u.Nickname + } else if fullName := u.GetFullName(); fullName != "" { + displayName = fullName + } + } else if nameFormat == ShowFullName { + if fullName := u.GetFullName(); fullName != "" { + displayName = fullName + } + } + + return displayName +} + +func (u *User) GetDisplayName(nameFormat string) string { + displayName := u.Username + + return u.getDisplayName(displayName, nameFormat) +} + +func (u *User) GetDisplayNameWithPrefix(nameFormat, prefix string) string { + displayName := prefix + u.Username + + return u.getDisplayName(displayName, nameFormat) +} + +func (u *User) GetRoles() []string { + return strings.Fields(u.Roles) +} + +func (u *User) GetRawRoles() string { + return u.Roles +} + +func IsValidUserRoles(userRoles string) bool { + roles := strings.Fields(userRoles) + + for _, r := range roles { + if !IsValidRoleName(r) { + return false + } + } + + // Exclude just the system_admin role explicitly to prevent mistakes + if len(roles) == 1 && roles[0] == "system_admin" { + return false + } + + return true +} + +// Make sure you actually want to use this function. 
In context.go there are functions to check permissions +// This function should not be used to check permissions. +func (u *User) IsGuest() bool { + return IsInRole(u.Roles, SystemGuestRoleId) +} + +func (u *User) IsSystemAdmin() bool { + return IsInRole(u.Roles, SystemAdminRoleId) +} + +// Make sure you actually want to use this function. In context.go there are functions to check permissions +// This function should not be used to check permissions. +func (u *User) IsInRole(inRole string) bool { + return IsInRole(u.Roles, inRole) +} + +// Make sure you actually want to use this function. In context.go there are functions to check permissions +// This function should not be used to check permissions. +func IsInRole(userRoles string, inRole string) bool { + roles := strings.Split(userRoles, " ") + + return slices.Contains(roles, inRole) +} + +func (u *User) IsSSOUser() bool { + return u.AuthService != "" && u.AuthService != UserAuthServiceEmail +} + +func (u *User) IsOAuthUser() bool { + return u.AuthService == ServiceGitlab || + u.AuthService == ServiceGoogle || + u.AuthService == ServiceOffice365 || + u.AuthService == ServiceOpenid +} + +func (u *User) IsLDAPUser() bool { + return u.AuthService == UserAuthServiceLdap +} + +func (u *User) IsSAMLUser() bool { + return u.AuthService == UserAuthServiceSaml +} + +func (u *User) GetPreferredTimezone() string { + return GetPreferredTimezone(u.Timezone) +} + +func (u *User) GetTimezoneLocation() *time.Location { + loc, _ := time.LoadLocation(u.GetPreferredTimezone()) + if loc == nil { + loc = time.Now().UTC().Location() + } + return loc +} + +// IsRemote returns true if the user belongs to a remote cluster (has RemoteId). +func (u *User) IsRemote() bool { + return SafeDereference(u.RemoteId) != "" +} + +// GetRemoteID returns the remote id for this user or "" if not a remote user. 
+func (u *User) GetRemoteID() string { + return SafeDereference(u.RemoteId) +} + +func (u *User) GetOriginalRemoteID() string { + if u.Props == nil { + if u.IsRemote() { + return UserOriginalRemoteIdUnknown + } + return "" // Local user + } + if originalId, exists := u.Props[UserPropsKeyOriginalRemoteId]; exists && originalId != "" { + return originalId + } + if u.IsRemote() { + return UserOriginalRemoteIdUnknown + } + return "" // Local user +} + +func (u *User) GetAuthData() string { + return SafeDereference(u.AuthData) +} + +// GetProp fetches a prop value by name. +func (u *User) GetProp(name string) (string, bool) { + val, ok := u.Props[name] + return val, ok +} + +// SetProp sets a prop value by name, creating the map if nil. +// Not thread safe. +func (u *User) SetProp(name string, value string) { + if u.Props == nil { + u.Props = make(map[string]string) + } + u.Props[name] = value +} + +func (u *User) ToPatch() *UserPatch { + return &UserPatch{ + Username: &u.Username, Password: &u.Password, + Nickname: &u.Nickname, FirstName: &u.FirstName, LastName: &u.LastName, + Position: &u.Position, Email: &u.Email, + Props: u.Props, NotifyProps: u.NotifyProps, + Locale: &u.Locale, Timezone: u.Timezone, + } +} + +func (u *UserPatch) SetField(fieldName string, fieldValue string) { + switch fieldName { + case "FirstName": + u.FirstName = &fieldValue + case "LastName": + u.LastName = &fieldValue + case "Nickname": + u.Nickname = &fieldValue + case "Email": + u.Email = &fieldValue + case "Position": + u.Position = &fieldValue + case "Username": + u.Username = &fieldValue + } +} + +var validUsernameChars = regexp.MustCompile(`^[a-z0-9\.\-_]+$`) +var validUsernameCharsForRemote = regexp.MustCompile(`^[a-z0-9\.\-_:]*$`) + +var restrictedUsernames = map[string]struct{}{ + "all": {}, + "channel": {}, + "matterbot": {}, + "system": {}, +} + +func IsValidUsername(s string) bool { + if len(s) < UserNameMinLength || len(s) > UserNameMaxLength { + return false + } + + if 
!validUsernameChars.MatchString(s) { + return false + } + + _, found := restrictedUsernames[s] + return !found +} + +func IsValidUsernameAllowRemote(s string) bool { + if len(s) < UserNameMinLength || len(s) > UserNameMaxLength { + return false + } + + if !validUsernameCharsForRemote.MatchString(s) { + return false + } + + _, found := restrictedUsernames[s] + return !found +} + +func CleanUsername(logger mlog.LoggerIFace, username string) string { + s := NormalizeUsername(strings.Replace(username, " ", "-", -1)) + + for _, value := range reservedName { + if s == value { + s = strings.Replace(s, value, "", -1) + } + } + + s = strings.TrimSpace(s) + + for _, c := range s { + char := fmt.Sprintf("%c", c) + if !validUsernameChars.MatchString(char) { + s = strings.Replace(s, char, "-", -1) + } + } + + s = strings.Trim(s, "-") + + if !IsValidUsername(s) { + s = NewUsername() + logger.Warn("Generating new username since provided username was invalid", + mlog.String("provided_username", username), mlog.String("new_username", s)) + } + + return s +} + +func IsValidLocale(locale string) bool { + if locale != "" { + if len(locale) > UserLocaleMaxLength { + return false + } else if _, err := language.Parse(locale); err != nil { + return false + } + } + + return true +} + +//msgp:ignore UserWithGroups +type UserWithGroups struct { + User + GroupIDs *string `json:"-"` + Groups []*Group `json:"groups"` + SchemeGuest bool `json:"scheme_guest"` + SchemeUser bool `json:"scheme_user"` + SchemeAdmin bool `json:"scheme_admin"` +} + +func (u *UserWithGroups) GetGroupIDs() []string { + if u.GroupIDs == nil { + return nil + } + trimmed := strings.TrimSpace(*u.GroupIDs) + if trimmed == "" { + return nil + } + return strings.Split(trimmed, ",") +} + +//msgp:ignore UsersWithGroupsAndCount +type UsersWithGroupsAndCount struct { + Users []*UserWithGroups `json:"users"` + Count int64 `json:"total_count"` +} + +func (u *User) EmailDomain() string { + at := strings.LastIndex(u.Email, "@") + // at 
>= 0 holds true and this is not checked here. It holds true, because during signup we run `mail.ParseAddress(email)` + return u.Email[at+1:] +} + +type UserPostStats struct { + LastStatusAt *int64 `json:"last_status_at,omitempty"` + LastPostDate *int64 `json:"last_post_date,omitempty"` + DaysActive *int `json:"days_active,omitempty"` + TotalPosts *int `json:"total_posts,omitempty"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user_access_token.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_access_token.go new file mode 100644 index 00000000..dee31f18 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_access_token.go @@ -0,0 +1,41 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "net/http" +) + +type UserAccessToken struct { + Id string `json:"id"` + Token string `json:"token,omitempty"` + UserId string `json:"user_id"` + Description string `json:"description"` + IsActive bool `json:"is_active"` +} + +func (t *UserAccessToken) IsValid() *AppError { + if !IsValidId(t.Id) { + return NewAppError("UserAccessToken.IsValid", "model.user_access_token.is_valid.id.app_error", nil, "", http.StatusBadRequest) + } + + if len(t.Token) != 26 { + return NewAppError("UserAccessToken.IsValid", "model.user_access_token.is_valid.token.app_error", nil, "", http.StatusBadRequest) + } + + if !IsValidId(t.UserId) { + return NewAppError("UserAccessToken.IsValid", "model.user_access_token.is_valid.user_id.app_error", nil, "", http.StatusBadRequest) + } + + if len(t.Description) > 255 { + return NewAppError("UserAccessToken.IsValid", "model.user_access_token.is_valid.description.app_error", nil, "", http.StatusBadRequest) + } + + return nil +} + +func (t *UserAccessToken) PreSave() { + t.Id = NewId() + t.IsActive = true +} diff --git 
a/vendor/github.com/mattermost/mattermost/server/public/model/user_access_token_search.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_access_token_search.go new file mode 100644 index 00000000..97fcde12 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_access_token_search.go @@ -0,0 +1,8 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type UserAccessTokenSearch struct { + Term string `json:"term"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user_autocomplete.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_autocomplete.go new file mode 100644 index 00000000..b07131b3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_autocomplete.go @@ -0,0 +1,18 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +type UserAutocompleteInChannel struct { + InChannel []*User `json:"in_channel"` + OutOfChannel []*User `json:"out_of_channel"` +} + +type UserAutocompleteInTeam struct { + InTeam []*User `json:"in_team"` +} + +type UserAutocomplete struct { + Users []*User `json:"users"` + OutOfChannel []*User `json:"out_of_channel,omitempty"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user_count.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_count.go new file mode 100644 index 00000000..950a6700 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_count.go @@ -0,0 +1,28 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +// Options for counting users +type UserCountOptions struct { + // Should include users that are bots + IncludeBotAccounts bool + // Should include deleted users (of any type) + IncludeDeleted bool + // Include remote users + IncludeRemoteUsers bool + // Exclude regular users + ExcludeRegularUsers bool + // Only include users on a specific team. "" for any team. + TeamId string + // Only include users on a specific channel. "" for any channel. + ChannelId string + // Restrict to search in a list of teams and channels + ViewRestrictions *ViewUsersRestrictions + // Only include users matching any of the given system wide roles. + Roles []string + // Only include users matching any of the given channel roles, must be used with ChannelId. + ChannelRoles []string + // Only include users matching any of the given team roles, must be used with TeamId. + TeamRoles []string +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user_get.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_get.go new file mode 100644 index 00000000..78174b2f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_get.go @@ -0,0 +1,50 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type UserGetOptions struct { + // Filters the users in the team + InTeamId string + // Filters the users not in the team + NotInTeamId string + // Filters the users in the channel + InChannelId string + // Filters the users not in the channel + NotInChannelId string + // Filters the users in the group + InGroupId string + // Filters the users not in the group + NotInGroupId string + // Filters the users group constrained + GroupConstrained bool + // Filters the users without a team + WithoutTeam bool + // Filters the inactive users + Inactive bool + // Filters the active users + Active bool + // Filters for the given role + Role string + // Filters for users matching any of the given system wide roles + Roles []string + // Filters for users matching any of the given channel roles, must be used with InChannelId + ChannelRoles []string + // Filters for users matching any of the given team roles, must be used with InTeamId + TeamRoles []string + // Sorting option + Sort string + // Restrict to search in a list of teams and channels + ViewRestrictions *ViewUsersRestrictions + // Page + Page int + // Page size + PerPage int + // Filters the users that have been updated after the given time + UpdatedAfter int64 +} + +type UserGetByIdsOptions struct { + // Since filters the users based on their UpdateAt timestamp. + Since int64 +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user_search.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_search.go new file mode 100644 index 00000000..d0480fe5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_search.go @@ -0,0 +1,55 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +const UserSearchMaxLimit = 1000 +const UserSearchDefaultLimit = 100 + +// UserSearch captures the parameters provided by a client for initiating a user search. 
+type UserSearch struct { + Term string `json:"term"` + TeamId string `json:"team_id"` + NotInTeamId string `json:"not_in_team_id"` + InChannelId string `json:"in_channel_id"` + NotInChannelId string `json:"not_in_channel_id"` + InGroupId string `json:"in_group_id"` + GroupConstrained bool `json:"group_constrained"` + AllowInactive bool `json:"allow_inactive"` + WithoutTeam bool `json:"without_team"` + Limit int `json:"limit"` + Role string `json:"role"` + Roles []string `json:"roles"` + ChannelRoles []string `json:"channel_roles"` + TeamRoles []string `json:"team_roles"` + NotInGroupId string `json:"not_in_group_id"` +} + +// UserSearchOptions captures internal parameters derived from the user's permissions and a +// UserSearch request. +type UserSearchOptions struct { + // IsAdmin tracks whether or not the search is being conducted by an administrator. + IsAdmin bool + // AllowEmails allows search to examine the emails of users. + AllowEmails bool + // AllowFullNames allows search to examine the full names of users, vs. just usernames and nicknames. + AllowFullNames bool + // AllowInactive configures whether or not to return inactive users in the search results. + AllowInactive bool + // Narrows the search to the group constrained users + GroupConstrained bool + // Limit limits the total number of results returned. 
+ Limit int + // Filters for the given role + Role string + // Filters for users that have any of the given system roles + Roles []string + // Filters for users that have the given channel roles to be used when searching in a channel + ChannelRoles []string + // Filters for users that have the given team roles to be used when searching in a team + TeamRoles []string + // Restrict to search in a list of teams and channels + ViewRestrictions *ViewUsersRestrictions + // List of allowed channels + ListOfAllowedChannels []string +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user_serial_gen.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_serial_gen.go new file mode 100644 index 00000000..410698e0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_serial_gen.go @@ -0,0 +1,1215 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. 
+ +import ( + "github.com/tinylib/msgp/msgp" +) + +// DecodeMsg implements msgp.Decodable +func (z *User) DecodeMsg(dc *msgp.Reader) (err error) { + var zb0001 uint32 + zb0001, err = dc.ReadArrayHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + if zb0001 != 35 { + err = msgp.ArrayError{Wanted: 35, Got: zb0001} + return + } + z.Id, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Id") + return + } + z.CreateAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + z.UpdateAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "UpdateAt") + return + } + z.DeleteAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "DeleteAt") + return + } + z.Username, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Username") + return + } + z.Password, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Password") + return + } + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, "AuthData") + return + } + z.AuthData = nil + } else { + if z.AuthData == nil { + z.AuthData = new(string) + } + *z.AuthData, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "AuthData") + return + } + } + z.AuthService, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "AuthService") + return + } + z.Email, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Email") + return + } + z.EmailVerified, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "EmailVerified") + return + } + z.Nickname, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Nickname") + return + } + z.FirstName, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "FirstName") + return + } + z.LastName, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "LastName") + return + } + z.Position, err = dc.ReadString() + if err != nil { + err = 
msgp.WrapError(err, "Position") + return + } + z.Roles, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + z.AllowMarketing, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "AllowMarketing") + return + } + err = z.Props.DecodeMsg(dc) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + err = z.NotifyProps.DecodeMsg(dc) + if err != nil { + err = msgp.WrapError(err, "NotifyProps") + return + } + z.LastPasswordUpdate, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "LastPasswordUpdate") + return + } + z.LastPictureUpdate, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "LastPictureUpdate") + return + } + z.FailedAttempts, err = dc.ReadInt() + if err != nil { + err = msgp.WrapError(err, "FailedAttempts") + return + } + z.Locale, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Locale") + return + } + err = z.Timezone.DecodeMsg(dc) + if err != nil { + err = msgp.WrapError(err, "Timezone") + return + } + z.MfaActive, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "MfaActive") + return + } + z.MfaSecret, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "MfaSecret") + return + } + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, "RemoteId") + return + } + z.RemoteId = nil + } else { + if z.RemoteId == nil { + z.RemoteId = new(string) + } + *z.RemoteId, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "RemoteId") + return + } + } + z.LastActivityAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "LastActivityAt") + return + } + z.IsBot, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "IsBot") + return + } + z.BotDescription, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "BotDescription") + return + } + z.BotLastIconUpdate, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, 
"BotLastIconUpdate") + return + } + z.TermsOfServiceId, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "TermsOfServiceId") + return + } + z.TermsOfServiceCreateAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "TermsOfServiceCreateAt") + return + } + z.DisableWelcomeEmail, err = dc.ReadBool() + if err != nil { + err = msgp.WrapError(err, "DisableWelcomeEmail") + return + } + z.LastLogin, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "LastLogin") + return + } + err = z.MfaUsedTimestamps.DecodeMsg(dc) + if err != nil { + err = msgp.WrapError(err, "MfaUsedTimestamps") + return + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z *User) EncodeMsg(en *msgp.Writer) (err error) { + // array header, size 35 + err = en.Append(0xdc, 0x0, 0x23) + if err != nil { + return + } + err = en.WriteString(z.Id) + if err != nil { + err = msgp.WrapError(err, "Id") + return + } + err = en.WriteInt64(z.CreateAt) + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + err = en.WriteInt64(z.UpdateAt) + if err != nil { + err = msgp.WrapError(err, "UpdateAt") + return + } + err = en.WriteInt64(z.DeleteAt) + if err != nil { + err = msgp.WrapError(err, "DeleteAt") + return + } + err = en.WriteString(z.Username) + if err != nil { + err = msgp.WrapError(err, "Username") + return + } + err = en.WriteString(z.Password) + if err != nil { + err = msgp.WrapError(err, "Password") + return + } + if z.AuthData == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = en.WriteString(*z.AuthData) + if err != nil { + err = msgp.WrapError(err, "AuthData") + return + } + } + err = en.WriteString(z.AuthService) + if err != nil { + err = msgp.WrapError(err, "AuthService") + return + } + err = en.WriteString(z.Email) + if err != nil { + err = msgp.WrapError(err, "Email") + return + } + err = en.WriteBool(z.EmailVerified) + if err != nil { + err = msgp.WrapError(err, "EmailVerified") + return + } 
+ err = en.WriteString(z.Nickname) + if err != nil { + err = msgp.WrapError(err, "Nickname") + return + } + err = en.WriteString(z.FirstName) + if err != nil { + err = msgp.WrapError(err, "FirstName") + return + } + err = en.WriteString(z.LastName) + if err != nil { + err = msgp.WrapError(err, "LastName") + return + } + err = en.WriteString(z.Position) + if err != nil { + err = msgp.WrapError(err, "Position") + return + } + err = en.WriteString(z.Roles) + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + err = en.WriteBool(z.AllowMarketing) + if err != nil { + err = msgp.WrapError(err, "AllowMarketing") + return + } + err = z.Props.EncodeMsg(en) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + err = z.NotifyProps.EncodeMsg(en) + if err != nil { + err = msgp.WrapError(err, "NotifyProps") + return + } + err = en.WriteInt64(z.LastPasswordUpdate) + if err != nil { + err = msgp.WrapError(err, "LastPasswordUpdate") + return + } + err = en.WriteInt64(z.LastPictureUpdate) + if err != nil { + err = msgp.WrapError(err, "LastPictureUpdate") + return + } + err = en.WriteInt(z.FailedAttempts) + if err != nil { + err = msgp.WrapError(err, "FailedAttempts") + return + } + err = en.WriteString(z.Locale) + if err != nil { + err = msgp.WrapError(err, "Locale") + return + } + err = z.Timezone.EncodeMsg(en) + if err != nil { + err = msgp.WrapError(err, "Timezone") + return + } + err = en.WriteBool(z.MfaActive) + if err != nil { + err = msgp.WrapError(err, "MfaActive") + return + } + err = en.WriteString(z.MfaSecret) + if err != nil { + err = msgp.WrapError(err, "MfaSecret") + return + } + if z.RemoteId == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = en.WriteString(*z.RemoteId) + if err != nil { + err = msgp.WrapError(err, "RemoteId") + return + } + } + err = en.WriteInt64(z.LastActivityAt) + if err != nil { + err = msgp.WrapError(err, "LastActivityAt") + return + } + err = en.WriteBool(z.IsBot) + if err != nil { + 
err = msgp.WrapError(err, "IsBot") + return + } + err = en.WriteString(z.BotDescription) + if err != nil { + err = msgp.WrapError(err, "BotDescription") + return + } + err = en.WriteInt64(z.BotLastIconUpdate) + if err != nil { + err = msgp.WrapError(err, "BotLastIconUpdate") + return + } + err = en.WriteString(z.TermsOfServiceId) + if err != nil { + err = msgp.WrapError(err, "TermsOfServiceId") + return + } + err = en.WriteInt64(z.TermsOfServiceCreateAt) + if err != nil { + err = msgp.WrapError(err, "TermsOfServiceCreateAt") + return + } + err = en.WriteBool(z.DisableWelcomeEmail) + if err != nil { + err = msgp.WrapError(err, "DisableWelcomeEmail") + return + } + err = en.WriteInt64(z.LastLogin) + if err != nil { + err = msgp.WrapError(err, "LastLogin") + return + } + err = z.MfaUsedTimestamps.EncodeMsg(en) + if err != nil { + err = msgp.WrapError(err, "MfaUsedTimestamps") + return + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z *User) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // array header, size 35 + o = append(o, 0xdc, 0x0, 0x23) + o = msgp.AppendString(o, z.Id) + o = msgp.AppendInt64(o, z.CreateAt) + o = msgp.AppendInt64(o, z.UpdateAt) + o = msgp.AppendInt64(o, z.DeleteAt) + o = msgp.AppendString(o, z.Username) + o = msgp.AppendString(o, z.Password) + if z.AuthData == nil { + o = msgp.AppendNil(o) + } else { + o = msgp.AppendString(o, *z.AuthData) + } + o = msgp.AppendString(o, z.AuthService) + o = msgp.AppendString(o, z.Email) + o = msgp.AppendBool(o, z.EmailVerified) + o = msgp.AppendString(o, z.Nickname) + o = msgp.AppendString(o, z.FirstName) + o = msgp.AppendString(o, z.LastName) + o = msgp.AppendString(o, z.Position) + o = msgp.AppendString(o, z.Roles) + o = msgp.AppendBool(o, z.AllowMarketing) + o, err = z.Props.MarshalMsg(o) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + o, err = z.NotifyProps.MarshalMsg(o) + if err != nil { + err = msgp.WrapError(err, "NotifyProps") + 
return + } + o = msgp.AppendInt64(o, z.LastPasswordUpdate) + o = msgp.AppendInt64(o, z.LastPictureUpdate) + o = msgp.AppendInt(o, z.FailedAttempts) + o = msgp.AppendString(o, z.Locale) + o, err = z.Timezone.MarshalMsg(o) + if err != nil { + err = msgp.WrapError(err, "Timezone") + return + } + o = msgp.AppendBool(o, z.MfaActive) + o = msgp.AppendString(o, z.MfaSecret) + if z.RemoteId == nil { + o = msgp.AppendNil(o) + } else { + o = msgp.AppendString(o, *z.RemoteId) + } + o = msgp.AppendInt64(o, z.LastActivityAt) + o = msgp.AppendBool(o, z.IsBot) + o = msgp.AppendString(o, z.BotDescription) + o = msgp.AppendInt64(o, z.BotLastIconUpdate) + o = msgp.AppendString(o, z.TermsOfServiceId) + o = msgp.AppendInt64(o, z.TermsOfServiceCreateAt) + o = msgp.AppendBool(o, z.DisableWelcomeEmail) + o = msgp.AppendInt64(o, z.LastLogin) + o, err = z.MfaUsedTimestamps.MarshalMsg(o) + if err != nil { + err = msgp.WrapError(err, "MfaUsedTimestamps") + return + } + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *User) UnmarshalMsg(bts []byte) (o []byte, err error) { + var zb0001 uint32 + zb0001, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + if zb0001 != 35 { + err = msgp.ArrayError{Wanted: 35, Got: zb0001} + return + } + z.Id, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Id") + return + } + z.CreateAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "CreateAt") + return + } + z.UpdateAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "UpdateAt") + return + } + z.DeleteAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "DeleteAt") + return + } + z.Username, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Username") + return + } + z.Password, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Password") + 
return + } + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.AuthData = nil + } else { + if z.AuthData == nil { + z.AuthData = new(string) + } + *z.AuthData, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "AuthData") + return + } + } + z.AuthService, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "AuthService") + return + } + z.Email, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Email") + return + } + z.EmailVerified, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "EmailVerified") + return + } + z.Nickname, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Nickname") + return + } + z.FirstName, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "FirstName") + return + } + z.LastName, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastName") + return + } + z.Position, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Position") + return + } + z.Roles, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Roles") + return + } + z.AllowMarketing, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "AllowMarketing") + return + } + bts, err = z.Props.UnmarshalMsg(bts) + if err != nil { + err = msgp.WrapError(err, "Props") + return + } + bts, err = z.NotifyProps.UnmarshalMsg(bts) + if err != nil { + err = msgp.WrapError(err, "NotifyProps") + return + } + z.LastPasswordUpdate, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastPasswordUpdate") + return + } + z.LastPictureUpdate, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastPictureUpdate") + return + } + z.FailedAttempts, bts, err = msgp.ReadIntBytes(bts) + if err != 
nil { + err = msgp.WrapError(err, "FailedAttempts") + return + } + z.Locale, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Locale") + return + } + bts, err = z.Timezone.UnmarshalMsg(bts) + if err != nil { + err = msgp.WrapError(err, "Timezone") + return + } + z.MfaActive, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "MfaActive") + return + } + z.MfaSecret, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "MfaSecret") + return + } + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.RemoteId = nil + } else { + if z.RemoteId == nil { + z.RemoteId = new(string) + } + *z.RemoteId, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "RemoteId") + return + } + } + z.LastActivityAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastActivityAt") + return + } + z.IsBot, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "IsBot") + return + } + z.BotDescription, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "BotDescription") + return + } + z.BotLastIconUpdate, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "BotLastIconUpdate") + return + } + z.TermsOfServiceId, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "TermsOfServiceId") + return + } + z.TermsOfServiceCreateAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "TermsOfServiceCreateAt") + return + } + z.DisableWelcomeEmail, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + err = msgp.WrapError(err, "DisableWelcomeEmail") + return + } + z.LastLogin, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastLogin") + return + } + bts, err = z.MfaUsedTimestamps.UnmarshalMsg(bts) + if err != nil { + err = 
msgp.WrapError(err, "MfaUsedTimestamps") + return + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *User) Msgsize() (s int) { + s = 3 + msgp.StringPrefixSize + len(z.Id) + msgp.Int64Size + msgp.Int64Size + msgp.Int64Size + msgp.StringPrefixSize + len(z.Username) + msgp.StringPrefixSize + len(z.Password) + if z.AuthData == nil { + s += msgp.NilSize + } else { + s += msgp.StringPrefixSize + len(*z.AuthData) + } + s += msgp.StringPrefixSize + len(z.AuthService) + msgp.StringPrefixSize + len(z.Email) + msgp.BoolSize + msgp.StringPrefixSize + len(z.Nickname) + msgp.StringPrefixSize + len(z.FirstName) + msgp.StringPrefixSize + len(z.LastName) + msgp.StringPrefixSize + len(z.Position) + msgp.StringPrefixSize + len(z.Roles) + msgp.BoolSize + z.Props.Msgsize() + z.NotifyProps.Msgsize() + msgp.Int64Size + msgp.Int64Size + msgp.IntSize + msgp.StringPrefixSize + len(z.Locale) + z.Timezone.Msgsize() + msgp.BoolSize + msgp.StringPrefixSize + len(z.MfaSecret) + if z.RemoteId == nil { + s += msgp.NilSize + } else { + s += msgp.StringPrefixSize + len(*z.RemoteId) + } + s += msgp.Int64Size + msgp.BoolSize + msgp.StringPrefixSize + len(z.BotDescription) + msgp.Int64Size + msgp.StringPrefixSize + len(z.TermsOfServiceId) + msgp.Int64Size + msgp.BoolSize + msgp.Int64Size + z.MfaUsedTimestamps.Msgsize() + return +} + +// DecodeMsg implements msgp.Decodable +func (z *UserMap) DecodeMsg(dc *msgp.Reader) (err error) { + var zb0003 uint32 + zb0003, err = dc.ReadMapHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + if (*z) == nil { + (*z) = make(UserMap, zb0003) + } else if len((*z)) > 0 { + for key := range *z { + delete((*z), key) + } + } + for zb0003 > 0 { + zb0003-- + var zb0001 string + var zb0002 *User + zb0001, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err) + return + } + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, 
zb0001) + return + } + zb0002 = nil + } else { + if zb0002 == nil { + zb0002 = new(User) + } + err = zb0002.DecodeMsg(dc) + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + } + (*z)[zb0001] = zb0002 + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z UserMap) EncodeMsg(en *msgp.Writer) (err error) { + err = en.WriteMapHeader(uint32(len(z))) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0004, zb0005 := range z { + err = en.WriteString(zb0004) + if err != nil { + err = msgp.WrapError(err) + return + } + if zb0005 == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = zb0005.EncodeMsg(en) + if err != nil { + err = msgp.WrapError(err, zb0004) + return + } + } + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z UserMap) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + o = msgp.AppendMapHeader(o, uint32(len(z))) + for zb0004, zb0005 := range z { + o = msgp.AppendString(o, zb0004) + if zb0005 == nil { + o = msgp.AppendNil(o) + } else { + o, err = zb0005.MarshalMsg(o) + if err != nil { + err = msgp.WrapError(err, zb0004) + return + } + } + } + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *UserMap) UnmarshalMsg(bts []byte) (o []byte, err error) { + var zb0003 uint32 + zb0003, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + if (*z) == nil { + (*z) = make(UserMap, zb0003) + } else if len((*z)) > 0 { + for key := range *z { + delete((*z), key) + } + } + for zb0003 > 0 { + var zb0001 string + var zb0002 *User + zb0003-- + zb0001, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + zb0002 = nil + } else { + if zb0002 == nil { + zb0002 = new(User) + } + bts, err = zb0002.UnmarshalMsg(bts) + if err != nil { + err = msgp.WrapError(err, zb0001) + 
return + } + } + (*z)[zb0001] = zb0002 + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z UserMap) Msgsize() (s int) { + s = msgp.MapHeaderSize + if z != nil { + for zb0004, zb0005 := range z { + _ = zb0005 + s += msgp.StringPrefixSize + len(zb0004) + if zb0005 == nil { + s += msgp.NilSize + } else { + s += zb0005.Msgsize() + } + } + } + return +} + +// DecodeMsg implements msgp.Decodable +func (z *UserPostStats) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, err = dc.ReadMapHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "LastStatusAt": + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, "LastStatusAt") + return + } + z.LastStatusAt = nil + } else { + if z.LastStatusAt == nil { + z.LastStatusAt = new(int64) + } + *z.LastStatusAt, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "LastStatusAt") + return + } + } + case "LastPostDate": + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, "LastPostDate") + return + } + z.LastPostDate = nil + } else { + if z.LastPostDate == nil { + z.LastPostDate = new(int64) + } + *z.LastPostDate, err = dc.ReadInt64() + if err != nil { + err = msgp.WrapError(err, "LastPostDate") + return + } + } + case "DaysActive": + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, "DaysActive") + return + } + z.DaysActive = nil + } else { + if z.DaysActive == nil { + z.DaysActive = new(int) + } + *z.DaysActive, err = dc.ReadInt() + if err != nil { + err = msgp.WrapError(err, "DaysActive") + return + } + } + case "TotalPosts": + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + err = msgp.WrapError(err, 
"TotalPosts") + return + } + z.TotalPosts = nil + } else { + if z.TotalPosts == nil { + z.TotalPosts = new(int) + } + *z.TotalPosts, err = dc.ReadInt() + if err != nil { + err = msgp.WrapError(err, "TotalPosts") + return + } + } + default: + err = dc.Skip() + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z *UserPostStats) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 4 + // write "LastStatusAt" + err = en.Append(0x84, 0xac, 0x4c, 0x61, 0x73, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x41, 0x74) + if err != nil { + return + } + if z.LastStatusAt == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = en.WriteInt64(*z.LastStatusAt) + if err != nil { + err = msgp.WrapError(err, "LastStatusAt") + return + } + } + // write "LastPostDate" + err = en.Append(0xac, 0x4c, 0x61, 0x73, 0x74, 0x50, 0x6f, 0x73, 0x74, 0x44, 0x61, 0x74, 0x65) + if err != nil { + return + } + if z.LastPostDate == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = en.WriteInt64(*z.LastPostDate) + if err != nil { + err = msgp.WrapError(err, "LastPostDate") + return + } + } + // write "DaysActive" + err = en.Append(0xaa, 0x44, 0x61, 0x79, 0x73, 0x41, 0x63, 0x74, 0x69, 0x76, 0x65) + if err != nil { + return + } + if z.DaysActive == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = en.WriteInt(*z.DaysActive) + if err != nil { + err = msgp.WrapError(err, "DaysActive") + return + } + } + // write "TotalPosts" + err = en.Append(0xaa, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x50, 0x6f, 0x73, 0x74, 0x73) + if err != nil { + return + } + if z.TotalPosts == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = en.WriteInt(*z.TotalPosts) + if err != nil { + err = msgp.WrapError(err, "TotalPosts") + return + } + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z *UserPostStats) MarshalMsg(b []byte) (o []byte, 
err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 4 + // string "LastStatusAt" + o = append(o, 0x84, 0xac, 0x4c, 0x61, 0x73, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x41, 0x74) + if z.LastStatusAt == nil { + o = msgp.AppendNil(o) + } else { + o = msgp.AppendInt64(o, *z.LastStatusAt) + } + // string "LastPostDate" + o = append(o, 0xac, 0x4c, 0x61, 0x73, 0x74, 0x50, 0x6f, 0x73, 0x74, 0x44, 0x61, 0x74, 0x65) + if z.LastPostDate == nil { + o = msgp.AppendNil(o) + } else { + o = msgp.AppendInt64(o, *z.LastPostDate) + } + // string "DaysActive" + o = append(o, 0xaa, 0x44, 0x61, 0x79, 0x73, 0x41, 0x63, 0x74, 0x69, 0x76, 0x65) + if z.DaysActive == nil { + o = msgp.AppendNil(o) + } else { + o = msgp.AppendInt(o, *z.DaysActive) + } + // string "TotalPosts" + o = append(o, 0xaa, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x50, 0x6f, 0x73, 0x74, 0x73) + if z.TotalPosts == nil { + o = msgp.AppendNil(o) + } else { + o = msgp.AppendInt(o, *z.TotalPosts) + } + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *UserPostStats) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "LastStatusAt": + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.LastStatusAt = nil + } else { + if z.LastStatusAt == nil { + z.LastStatusAt = new(int64) + } + *z.LastStatusAt, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastStatusAt") + return + } + } + case "LastPostDate": + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.LastPostDate = nil + } else { + if z.LastPostDate == nil { + z.LastPostDate = new(int64) + } + *z.LastPostDate, 
bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "LastPostDate") + return + } + } + case "DaysActive": + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.DaysActive = nil + } else { + if z.DaysActive == nil { + z.DaysActive = new(int) + } + *z.DaysActive, bts, err = msgp.ReadIntBytes(bts) + if err != nil { + err = msgp.WrapError(err, "DaysActive") + return + } + } + case "TotalPosts": + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.TotalPosts = nil + } else { + if z.TotalPosts == nil { + z.TotalPosts = new(int) + } + *z.TotalPosts, bts, err = msgp.ReadIntBytes(bts) + if err != nil { + err = msgp.WrapError(err, "TotalPosts") + return + } + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *UserPostStats) Msgsize() (s int) { + s = 1 + 13 + if z.LastStatusAt == nil { + s += msgp.NilSize + } else { + s += msgp.Int64Size + } + s += 13 + if z.LastPostDate == nil { + s += msgp.NilSize + } else { + s += msgp.Int64Size + } + s += 11 + if z.DaysActive == nil { + s += msgp.NilSize + } else { + s += msgp.IntSize + } + s += 11 + if z.TotalPosts == nil { + s += msgp.NilSize + } else { + s += msgp.IntSize + } + return +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/user_terms_of_service.go b/vendor/github.com/mattermost/mattermost/server/public/model/user_terms_of_service.go new file mode 100644 index 00000000..880c0786 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/user_terms_of_service.go @@ -0,0 +1,48 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +import ( + "fmt" + "net/http" +) + +type UserTermsOfService struct { + UserId string `json:"user_id"` + TermsOfServiceId string `json:"terms_of_service_id"` + CreateAt int64 `json:"create_at"` +} + +func (ut *UserTermsOfService) IsValid() *AppError { + if !IsValidId(ut.UserId) { + return InvalidUserTermsOfServiceError("user_id", ut.UserId) + } + + if !IsValidId(ut.TermsOfServiceId) { + return InvalidUserTermsOfServiceError("terms_of_service_id", ut.UserId) + } + + if ut.CreateAt == 0 { + return InvalidUserTermsOfServiceError("create_at", ut.UserId) + } + + return nil +} + +func (ut *UserTermsOfService) PreSave() { + if ut.UserId == "" { + ut.UserId = NewId() + } + + ut.CreateAt = GetMillis() +} + +func InvalidUserTermsOfServiceError(fieldName string, userTermsOfServiceId string) *AppError { + id := fmt.Sprintf("model.user_terms_of_service.is_valid.%s.app_error", fieldName) + details := "" + if userTermsOfServiceId != "" { + details = "user_terms_of_service_user_id=" + userTermsOfServiceId + } + return NewAppError("UserTermsOfService.IsValid", id, nil, details, http.StatusBadRequest) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/users_stats.go b/vendor/github.com/mattermost/mattermost/server/public/model/users_stats.go new file mode 100644 index 00000000..1a7becf2 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/users_stats.go @@ -0,0 +1,8 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package model + +type UsersStats struct { + TotalUsersCount int64 `json:"total_users_count"` +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/utils.go b/vendor/github.com/mattermost/mattermost/server/public/model/utils.go new file mode 100644 index 00000000..e486da85 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/utils.go @@ -0,0 +1,900 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "bytes" + "crypto/rand" + "database/sql/driver" + "encoding/base32" + "encoding/json" + "fmt" + "io" + "maps" + "net" + "net/mail" + "net/url" + "os" + "regexp" + "slices" + "sort" + "strings" + "sync" + "time" + "unicode" + + "github.com/pborman/uuid" + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/shared/i18n" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +const ( + LowercaseLetters = "abcdefghijklmnopqrstuvwxyz" + UppercaseLetters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + NUMBERS = "0123456789" + SYMBOLS = " !\"\\#$%&'()*+,-./:;<=>?@[]^_`|~" + BinaryParamKey = "MM_BINARY_PARAMETERS" + NoTranslation = "" + maxPropSizeBytes = 1024 * 1024 + PayloadParseError = "api.payload.parse.error" +) + +var ErrMaxPropSizeExceeded = fmt.Errorf("max prop size of %d exceeded", maxPropSizeBytes) + +//msgp:ignore StringInterface StringSet +type StringInterface map[string]any +type StringSet map[string]struct{} + +//msgp:tuple StringArray +type StringArray []string + +func (ss StringSet) Has(val string) bool { + _, ok := ss[val] + return ok +} + +func (ss StringSet) Add(val string) { + ss[val] = struct{}{} +} + +func (ss StringSet) Val() []string { + keys := make([]string, 0, len(ss)) + for k := range ss { + keys = append(keys, k) + } + return keys +} + +func (sa StringArray) Remove(input string) StringArray { + for index := range sa { + if sa[index] == input { + ret := make(StringArray, 0, len(sa)-1) 
+ ret = append(ret, sa[:index]...) + return append(ret, sa[index+1:]...) + } + } + return sa +} + +func (sa StringArray) Contains(input string) bool { + return slices.Contains(sa, input) +} +func (sa StringArray) Equals(input StringArray) bool { + if len(sa) != len(input) { + return false + } + + for index := range sa { + if sa[index] != input[index] { + return false + } + } + + return true +} + +// Value converts StringArray to database value +func (sa StringArray) Value() (driver.Value, error) { + sz := 0 + for i := range sa { + sz += len(sa[i]) + if sz > maxPropSizeBytes { + return nil, ErrMaxPropSizeExceeded + } + } + + j, err := json.Marshal(sa) + if err != nil { + return nil, err + } + // non utf8 characters are not supported https://mattermost.atlassian.net/browse/MM-41066 + return string(j), err +} + +// Scan converts database column value to StringArray +func (sa *StringArray) Scan(value any) error { + if value == nil { + return nil + } + + buf, ok := value.([]byte) + if ok { + return json.Unmarshal(buf, sa) + } + + str, ok := value.(string) + if ok { + return json.Unmarshal([]byte(str), sa) + } + + return errors.New("received value is neither a byte slice nor string") +} + +// Scan converts database column value to StringMap +func (m *StringMap) Scan(value any) error { + if value == nil { + return nil + } + + buf, ok := value.([]byte) + if ok { + return json.Unmarshal(buf, m) + } + + str, ok := value.(string) + if ok { + return json.Unmarshal([]byte(str), m) + } + + return errors.New("received value is neither a byte slice nor string") +} + +// Value converts StringMap to database value +func (m StringMap) Value() (driver.Value, error) { + ok := m[BinaryParamKey] + delete(m, BinaryParamKey) + + sz := 0 + for k := range m { + sz += len(k) + len(m[k]) + if sz > maxPropSizeBytes { + return nil, ErrMaxPropSizeExceeded + } + } + + buf, err := json.Marshal(m) + if err != nil { + return nil, err + } + if ok == "true" { + return append([]byte{0x01}, buf...), nil 
+ } else if ok == "false" { + return buf, nil + } + // Key wasn't found. We fall back to the default case. + return string(buf), nil +} + +func (m StringMap) MarshalJSON() ([]byte, error) { + return json.Marshal((map[string]string)(m)) +} + +func (si *StringInterface) Scan(value any) error { + if value == nil { + return nil + } + + buf, ok := value.([]byte) + if ok { + return json.Unmarshal(buf, si) + } + + str, ok := value.(string) + if ok { + return json.Unmarshal([]byte(str), si) + } + + return errors.New("received value is neither a byte slice nor string") +} + +// Value converts StringInterface to database value +func (si StringInterface) Value() (driver.Value, error) { + j, err := json.Marshal(si) + if err != nil { + return nil, err + } + + if len(j) > maxPropSizeBytes { + return nil, ErrMaxPropSizeExceeded + } + + // non utf8 characters are not supported https://mattermost.atlassian.net/browse/MM-41066 + return string(j), err +} + +func (si StringInterface) MarshalJSON() ([]byte, error) { + return json.Marshal((map[string]any)(si)) +} + +var translateFunc i18n.TranslateFunc +var translateFuncOnce sync.Once + +func AppErrorInit(t i18n.TranslateFunc) { + translateFuncOnce.Do(func() { + translateFunc = t + }) +} + +//msgp:ignore AppError +type AppError struct { + Id string `json:"id"` + Message string `json:"message"` // Message to be display to the end user without debugging information + DetailedError string `json:"detailed_error"` // Internal error string to help the developer + RequestId string `json:"request_id,omitempty"` // The RequestId that's also set in the header + StatusCode int `json:"status_code,omitempty"` // The http status code + Where string `json:"-"` // The function where it happened in the form of Struct.Func + SkipTranslation bool `json:"-"` // Whether translation for the error should be skipped. 
+ params map[string]any + wrapped error +} + +const maxErrorLength = 1024 + +func (er *AppError) Error() string { + var sb strings.Builder + + // render the error information + if er.Where != "" { + sb.WriteString(er.Where) + sb.WriteString(": ") + } + + if er.Message != NoTranslation { + sb.WriteString(er.Message) + } + + // only render the detailed error when it's present + if er.DetailedError != "" { + if er.Message != NoTranslation { + sb.WriteString(", ") + } + sb.WriteString(er.DetailedError) + } + + // render the wrapped error + err := er.wrapped + if err != nil { + sb.WriteString(", ") + sb.WriteString(err.Error()) + } + + res := sb.String() + if len(res) > maxErrorLength { + res = res[:maxErrorLength] + "..." + } + return res +} + +func (er *AppError) Translate(T i18n.TranslateFunc) { + if er.SkipTranslation { + return + } + + if T == nil { + er.Message = er.Id + return + } + + if er.params == nil { + er.Message = T(er.Id) + } else { + er.Message = T(er.Id, er.params) + } +} + +func (er *AppError) SystemMessage(T i18n.TranslateFunc) string { + if er.params == nil { + return T(er.Id) + } + return T(er.Id, er.params) +} + +func (er *AppError) ToJSON() string { + // turn the wrapped error into a detailed message + detailed := er.DetailedError + defer func() { + er.DetailedError = detailed + }() + + er.wrappedToDetailed() + + b, _ := json.Marshal(er) + return string(b) +} + +func (er *AppError) wrappedToDetailed() { + if er.wrapped == nil { + return + } + + if er.DetailedError != "" { + er.DetailedError += ", " + } + + er.DetailedError += er.wrapped.Error() +} + +func (er *AppError) Unwrap() error { + return er.wrapped +} + +func (er *AppError) Wrap(err error) *AppError { + er.wrapped = err + return er +} + +func (er *AppError) WipeDetailed() { + er.wrapped = nil + er.DetailedError = "" +} + +// AppErrorFromJSON will try to decode the input into an AppError. 
+func AppErrorFromJSON(r io.Reader) error { + data, err := io.ReadAll(r) + if err != nil { + return err + } + + var er AppError + err = json.NewDecoder(bytes.NewReader(data)).Decode(&er) + if err != nil { + // If the request exceeded FileSettings.MaxFileSize a plain error gets returned. Convert it into an AppError. + if string(data) == "http: request body too large\n" { + return errors.New("The request was too large. Consider asking your System Admin to raise the FileSettings.MaxFileSize setting.") + } + + return errors.Wrapf(err, "failed to decode JSON payload into AppError. Body: %s", string(data)) + } + + return &er +} + +func NewAppError(where string, id string, params map[string]any, details string, status int) *AppError { + ap := &AppError{ + Id: id, + params: params, + Message: id, + Where: where, + DetailedError: details, + StatusCode: status, + } + ap.Translate(translateFunc) + return ap +} + +var encoding = base32.NewEncoding("ybndrfg8ejkmcpqxot1uwisza345h769").WithPadding(base32.NoPadding) + +// NewId is a globally unique identifier. It is a [A-Z0-9] string 26 +// characters long. It is a UUID version 4 Guid that is zbased32 encoded +// without the padding. +func NewId() string { + return encoding.EncodeToString(uuid.NewRandom()) +} + +// NewUsername is a NewId prefixed with a letter to make valid username +func NewUsername() string { + return "a" + NewId() +} + +// NewRandomTeamName is a NewId that will be a valid team name. +func NewRandomTeamName() string { + teamName := NewId() + for IsReservedTeamName(teamName) { + teamName = NewId() + } + return teamName +} + +// NewRandomString returns a random string of the given length. +// The resulting entropy will be (5 * length) bits. +func NewRandomString(length int) string { + data := make([]byte, 1+(length*5/8)) + rand.Read(data) + return encoding.EncodeToString(data)[:length] +} + +// GetMillis is a convenience method to get milliseconds since epoch. 
+func GetMillis() int64 { + return GetMillisForTime(time.Now()) +} + +// GetMillisForTime is a convenience method to get milliseconds since epoch for provided Time. +func GetMillisForTime(thisTime time.Time) int64 { + return thisTime.UnixMilli() +} + +// GetTimeForMillis is a convenience method to get time.Time for milliseconds since epoch. +func GetTimeForMillis(millis int64) time.Time { + return time.UnixMilli(millis) +} + +// PadDateStringZeros is a convenience method to pad 2 digit date parts with zeros to meet ISO 8601 format +func PadDateStringZeros(dateString string) string { + parts := strings.Split(dateString, "-") + for index, part := range parts { + if len(part) == 1 { + parts[index] = "0" + part + } + } + dateString = strings.Join(parts[:], "-") + return dateString +} + +// GetStartOfDayMillis is a convenience method to get milliseconds since epoch for provided date's start of day +func GetStartOfDayMillis(thisTime time.Time, timeZoneOffset int) int64 { + localSearchTimeZone := time.FixedZone("Local Search Time Zone", timeZoneOffset) + resultTime := time.Date(thisTime.Year(), thisTime.Month(), thisTime.Day(), 0, 0, 0, 0, localSearchTimeZone) + return GetMillisForTime(resultTime) +} + +// GetEndOfDayMillis is a convenience method to get milliseconds since epoch for provided date's end of day +func GetEndOfDayMillis(thisTime time.Time, timeZoneOffset int) int64 { + localSearchTimeZone := time.FixedZone("Local Search Time Zone", timeZoneOffset) + resultTime := time.Date(thisTime.Year(), thisTime.Month(), thisTime.Day(), 23, 59, 59, 999999999, localSearchTimeZone) + return GetMillisForTime(resultTime) +} + +func CopyStringMap(originalMap map[string]string) map[string]string { + copyMap := make(map[string]string, len(originalMap)) + maps.Copy(copyMap, originalMap) + return copyMap +} + +// MapToJSON converts a map to a json string +func MapToJSON(objmap map[string]string) string { + b, _ := json.Marshal(objmap) + return string(b) +} + +// MapBoolToJSON 
converts a map to a json string +func MapBoolToJSON(objmap map[string]bool) string { + b, _ := json.Marshal(objmap) + return string(b) +} + +// MapFromJSON will decode the key/value pair map +func MapFromJSON(data io.Reader) map[string]string { + var objmap map[string]string + + json.NewDecoder(data).Decode(&objmap) + if objmap == nil { + return make(map[string]string) + } + + return objmap +} + +// MapFromJSON will decode the key/value pair map +func MapBoolFromJSON(data io.Reader) map[string]bool { + var objmap map[string]bool + + json.NewDecoder(data).Decode(&objmap) + if objmap == nil { + return make(map[string]bool) + } + + return objmap +} + +func ArrayToJSON(objmap []string) string { + b, _ := json.Marshal(objmap) + return string(b) +} + +// Deprecated: ArrayFromJSON is deprecated, +// use SortedArrayFromJSON or NonSortedArrayFromJSON instead +func ArrayFromJSON(data io.Reader) []string { + var objmap []string + json.NewDecoder(data).Decode(&objmap) + if objmap == nil { + return make([]string, 0) + } + return objmap +} + +func SortedArrayFromJSON(data io.Reader) ([]string, error) { + var obj []string + err := json.NewDecoder(data).Decode(&obj) + if err != nil || obj == nil { + return nil, err + } + + // Remove duplicate IDs as it can bring a significant load to the database. + return RemoveDuplicateStrings(obj), nil +} + +func NonSortedArrayFromJSON(data io.Reader) ([]string, error) { + var obj []string + err := json.NewDecoder(data).Decode(&obj) + if err != nil || obj == nil { + return nil, err + } + + // Remove duplicate IDs, but don't sort. 
+ return RemoveDuplicateStringsNonSort(obj), nil +} + +func ArrayFromInterface(data any) []string { + stringArray := []string{} + + dataArray, ok := data.([]any) + if !ok { + return stringArray + } + + for _, v := range dataArray { + if str, ok := v.(string); ok { + stringArray = append(stringArray, str) + } + } + + return stringArray +} + +func StringInterfaceToJSON(objmap map[string]any) string { + b, _ := json.Marshal(objmap) + return string(b) +} + +func StringInterfaceFromJSON(data io.Reader) map[string]any { + var objmap map[string]any + + json.NewDecoder(data).Decode(&objmap) + if objmap == nil { + return make(map[string]any) + } + + return objmap +} + +func StructFromJSONLimited[V any](data io.Reader, obj *V) error { + err := json.NewDecoder(data).Decode(&obj) + if err != nil || obj == nil { + return err + } + + return nil +} + +// ToJSON serializes an arbitrary data type to JSON, discarding the error. +func ToJSON(v any) []byte { + b, _ := json.Marshal(v) + return b +} + +func GetServerIPAddress(iface string) string { + var addrs []net.Addr + if iface == "" { + var err error + addrs, err = net.InterfaceAddrs() + if err != nil { + return "" + } + } else { + interfaces, err := net.Interfaces() + if err != nil { + return "" + } + for _, i := range interfaces { + if i.Name == iface { + addrs, err = i.Addrs() + if err != nil { + return "" + } + break + } + } + } + + for _, addr := range addrs { + if ip, ok := addr.(*net.IPNet); ok && !ip.IP.IsLoopback() && !ip.IP.IsLinkLocalUnicast() && !ip.IP.IsLinkLocalMulticast() { + if ip.IP.To4() != nil { + return ip.IP.String() + } + } + } + + return "" +} + +func isLower(s string) bool { + return strings.ToLower(s) == s +} + +func IsValidEmail(input string) bool { + if !isLower(input) { + return false + } + + if addr, err := mail.ParseAddress(input); err != nil { + return false + } else if addr.Address != input { + // mail.ParseAddress accepts input of the form "Billy Bob " or "", + // which we don't allow. 
We compare the user input with the parsed addr.Address to ensure we only + // accept plain addresses like "billy@example.com" + + // Log a warning for admins in case pre-existing users with emails like , which used + // to be valid before https://github.com/mattermost/mattermost/pull/29661, know how to deal with this + // error. We don't need to check for the case addr.Name != "", since that has always been rejected + if addr.Name == "" { + mlog.Warn("email seems to be enclosed in angle brackets, which is not valid; if this relates to an existing user, use the following mmctl command to modify their email: `mmctl user email \"\" affecteduser@domain.com`", mlog.String("email", input)) + } + return false + } + + // mail.ParseAddress accepts quoted strings for the address + // which can lead to sending to the wrong email address + // check for multiple '@' symbols and invalidate + if strings.Count(input, "@") > 1 { + return false + } + return true +} + +var reservedName = []string{ + "admin", + "api", + "channel", + "claim", + "error", + "files", + "help", + "landing", + "login", + "mfa", + "oauth", + "plug", + "plugins", + "post", + "signup", + "boards", + "playbooks", +} + +func IsValidChannelIdentifier(s string) bool { + return validSimpleAlphaNum.MatchString(s) && len(s) >= ChannelNameMinLength +} + +var ( + validAlphaNum = regexp.MustCompile(`^[a-z0-9]+([a-z\-0-9]+|(__)?)[a-z0-9]+$`) + validAlphaNumHyphenUnderscore = regexp.MustCompile(`^[a-z0-9]+([a-z\-\_0-9]+|(__)?)[a-z0-9]+$`) + validSimpleAlphaNum = regexp.MustCompile(`^[a-z0-9]+([a-z\-\_0-9]+|(__)?)[a-z0-9]*$`) + validSimpleAlphaNumHyphenUnderscore = regexp.MustCompile(`^[a-zA-Z0-9\-_]+$`) + validSimpleAlphaNumHyphenUnderscorePlus = regexp.MustCompile(`^[a-zA-Z0-9+_-]+$`) +) + +func isValidAlphaNum(s string) bool { + return validAlphaNum.MatchString(s) +} + +func IsValidAlphaNumHyphenUnderscore(s string, withFormat bool) bool { + if withFormat { + return validAlphaNumHyphenUnderscore.MatchString(s) + } + 
return validSimpleAlphaNumHyphenUnderscore.MatchString(s) +} + +func IsValidAlphaNumHyphenUnderscorePlus(s string) bool { + return validSimpleAlphaNumHyphenUnderscorePlus.MatchString(s) +} + +func Etag(parts ...any) string { + var etag strings.Builder + etag.WriteString(CurrentVersion) + + for _, part := range parts { + etag.WriteString(fmt.Sprintf(".%v", part)) + } + + return etag.String() +} + +var ( + validHashtag = regexp.MustCompile(`^(#\pL[\pL\d\-_.]*[\pL\d])$`) + puncStart = regexp.MustCompile(`^[^\pL\d\s#]+`) + hashtagStart = regexp.MustCompile(`^#{2,}`) + puncEnd = regexp.MustCompile(`[^\pL\d\s]+$`) +) + +func ParseHashtags(text string) (string, string) { + words := strings.Fields(text) + + var hashtagStringSb strings.Builder + var plainString strings.Builder + for _, word := range words { + // trim off surrounding punctuation + word = puncStart.ReplaceAllString(word, "") + word = puncEnd.ReplaceAllString(word, "") + + // and remove extra pound #s + word = hashtagStart.ReplaceAllString(word, "#") + + if validHashtag.MatchString(word) { + hashtagStringSb.WriteString(" " + word) + } else { + plainString.WriteString(" " + word) + } + } + hashtagString := hashtagStringSb.String() + + if len(hashtagString) > 1000 { + hashtagString = hashtagString[:999] + lastSpace := strings.LastIndex(hashtagString, " ") + if lastSpace > -1 { + hashtagString = hashtagString[:lastSpace] + } else { + hashtagString = "" + } + } + + return strings.TrimSpace(hashtagString), strings.TrimSpace(plainString.String()) +} + +func ClearMentionTags(post string) string { + post = strings.Replace(post, "", "", -1) + post = strings.Replace(post, "", "", -1) + return post +} + +func IsValidHTTPURL(rawURL string) bool { + if strings.Index(rawURL, "http://") != 0 && strings.Index(rawURL, "https://") != 0 { + return false + } + + if u, err := url.ParseRequestURI(rawURL); err != nil || u.Scheme == "" || u.Host == "" { + return false + } + + return true +} + +func IsValidId(value string) bool { + if 
len(value) != 26 { + return false + } + + for _, r := range value { + if !unicode.IsLetter(r) && !unicode.IsNumber(r) { + return false + } + } + + return true +} + +// RemoveDuplicateStrings does an in-place removal of duplicate strings +// from the input slice. The original slice gets modified. +func RemoveDuplicateStrings(in []string) []string { + // In-place de-dup. + // Copied from https://github.com/golang/go/wiki/SliceTricks#in-place-deduplicate-comparable + if len(in) == 0 { + return in + } + sort.Strings(in) + j := 0 + for i := 1; i < len(in); i++ { + if in[j] == in[i] { + continue + } + j++ + in[j] = in[i] + } + return in[:j+1] +} + +// RemoveDuplicateStringsNonSort does a removal of duplicate +// strings using a map. +func RemoveDuplicateStringsNonSort(in []string) []string { + allKeys := make(map[string]bool) + list := []string{} + for _, item := range in { + if _, value := allKeys[item]; !value { + allKeys[item] = true + list = append(list, item) + } + } + return list +} + +func GetPreferredTimezone(timezone StringMap) string { + if timezone["useAutomaticTimezone"] == "true" { + return timezone["automaticTimezone"] + } + + return timezone["manualTimezone"] +} + +// SanitizeUnicode will remove undesirable Unicode characters from a string. +func SanitizeUnicode(s string) string { + return strings.Map(filterBlocklist, s) +} + +// filterBlocklist returns `r` if it is not in the blocklist, otherwise drop (-1). 
+// Blocklist is taken from https://www.w3.org/TR/unicode-xml/#Charlist +func filterBlocklist(r rune) rune { + const drop = -1 + switch r { + case '\u0340', '\u0341': // clones of grave and acute; deprecated in Unicode + return drop + case '\u17A3', '\u17D3': // obsolete characters for Khmer; deprecated in Unicode + return drop + case '\u2028', '\u2029': // line and paragraph separator + return drop + case '\u202A', '\u202B', '\u202C', '\u202D', '\u202E': // BIDI embedding controls + return drop + case '\u206A', '\u206B': // activate/inhibit symmetric swapping; deprecated in Unicode + return drop + case '\u206C', '\u206D': // activate/inhibit Arabic form shaping; deprecated in Unicode + return drop + case '\u206E', '\u206F': // activate/inhibit national digit shapes; deprecated in Unicode + return drop + case '\uFFF9', '\uFFFA', '\uFFFB': // interlinear annotation characters + return drop + case '\uFEFF': // byte order mark + return drop + case '\uFFFC': // object replacement character + return drop + } + + // Scoping for musical notation + if r >= 0x0001D173 && r <= 0x0001D17A { + return drop + } + + // Language tag code points + if r >= 0x000E0000 && r <= 0x000E007F { + return drop + } + + return r +} + +func IsCloud() bool { + return os.Getenv("MM_CLOUD_INSTALLATION_ID") != "" +} + +func SliceToMapKey(s ...string) map[string]any { + m := make(map[string]any) + for i := range s { + m[s[i]] = struct{}{} + } + + if len(s) != len(m) { + panic("duplicate keys") + } + + return m +} + +// LimitRunes limits the number of runes in a string to the given maximum. +// It returns the potentially truncated string and a boolean indicating whether truncation occurred. +func LimitRunes(s string, maxRunes int) (string, bool) { + runes := []rune(s) + if len(runes) > maxRunes { + return string(runes[:maxRunes]), true + } + + return s, false +} + +// LimitBytes limits the number of bytes in a string to the given maximum. 
+// It returns the potentially truncated string and a boolean indicating whether truncation occurred. +func LimitBytes(s string, maxBytes int) (string, bool) { + if len(s) > maxBytes { + return s[:maxBytes], true + } + return s, false +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/utils_serial_gen.go b/vendor/github.com/mattermost/mattermost/server/public/model/utils_serial_gen.go new file mode 100644 index 00000000..b14631b3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/utils_serial_gen.go @@ -0,0 +1,93 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. + +import ( + "github.com/tinylib/msgp/msgp" +) + +// DecodeMsg implements msgp.Decodable +func (z *StringArray) DecodeMsg(dc *msgp.Reader) (err error) { + var zb0002 uint32 + zb0002, err = dc.ReadArrayHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + if cap((*z)) >= int(zb0002) { + (*z) = (*z)[:zb0002] + } else { + (*z) = make(StringArray, zb0002) + } + for zb0001 := range *z { + (*z)[zb0001], err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z StringArray) EncodeMsg(en *msgp.Writer) (err error) { + err = en.WriteArrayHeader(uint32(len(z))) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0003 := range z { + err = en.WriteString(z[zb0003]) + if err != nil { + err = msgp.WrapError(err, zb0003) + return + } + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z StringArray) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + o = msgp.AppendArrayHeader(o, uint32(len(z))) + for zb0003 := range z { + o = msgp.AppendString(o, z[zb0003]) + } + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *StringArray) 
UnmarshalMsg(bts []byte) (o []byte, err error) { + var zb0002 uint32 + zb0002, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + if cap((*z)) >= int(zb0002) { + (*z) = (*z)[:zb0002] + } else { + (*z) = make(StringArray, zb0002) + } + for zb0001 := range *z { + (*z)[zb0001], bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z StringArray) Msgsize() (s int) { + s = msgp.ArrayHeaderSize + for zb0003 := range z { + s += msgp.StringPrefixSize + len(z[zb0003]) + } + return +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/version.go b/vendor/github.com/mattermost/mattermost/server/public/model/version.go new file mode 100644 index 00000000..4bf61ac6 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/version.go @@ -0,0 +1,240 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "fmt" + "strconv" + "strings" +) + +// This is a list of all the current versions including any patches. +// It should be maintained in chronological order with most current +// release at the front of the list. 
+var versions = []string{ + "11.2.1", + "11.2.0", + "11.1.0", + "11.0.0", + "10.12.0", + "10.11.0", + "10.10.0", + "10.9.0", + "10.8.0", + "10.7.0", + "10.6.0", + "10.5.0", + "10.4.0", + "10.3.0", + "10.2.0", + "10.1.0", + "10.0.0", + "9.11.0", + "9.10.0", + "9.9.0", + "9.8.0", + "9.7.0", + "9.6.0", + "9.5.0", + "9.4.0", + "9.3.0", + "9.2.0", + "9.1.0", + "9.0.0", + "8.1.0", + "8.0.0", + "7.11.0", + "7.10.0", + "7.9.0", + "7.8.0", + "7.7.0", + "7.6.0", + "7.5.0", + "7.4.0", + "7.3.0", + "7.2.0", + "7.1.0", + "7.0.0", + "6.7.0", + "6.6.0", + "6.5.0", + "6.4.0", + "6.3.0", + "6.2.0", + "6.1.0", + "6.0.0", + "5.39.0", + "5.38.0", + "5.37.0", + "5.36.0", + "5.35.0", + "5.34.0", + "5.33.0", + "5.32.0", + "5.31.0", + "5.30.0", + "5.29.0", + "5.28.0", + "5.27.0", + "5.26.0", + "5.25.0", + "5.24.0", + "5.23.0", + "5.22.0", + "5.21.0", + "5.20.0", + "5.19.0", + "5.18.0", + "5.17.0", + "5.16.0", + "5.15.0", + "5.14.0", + "5.13.0", + "5.12.0", + "5.11.0", + "5.10.0", + "5.9.0", + "5.8.0", + "5.7.0", + "5.6.0", + "5.5.0", + "5.4.0", + "5.3.0", + "5.2.0", + "5.1.0", + "5.0.0", + "4.10.0", + "4.9.0", + "4.8.1", + "4.8.0", + "4.7.2", + "4.7.1", + "4.7.0", + "4.6.0", + "4.5.0", + "4.4.0", + "4.3.0", + "4.2.0", + "4.1.0", + "4.0.0", + "3.10.0", + "3.9.0", + "3.8.0", + "3.7.0", + "3.6.0", + "3.5.0", + "3.4.0", + "3.3.0", + "3.2.0", + "3.1.0", + "3.0.0", + "2.2.0", + "2.1.0", + "2.0.0", + "1.4.0", + "1.3.0", + "1.2.1", + "1.2.0", + "1.1.0", + "1.0.0", + "0.7.1", + "0.7.0", + "0.6.0", + "0.5.0", +} + +var CurrentVersion = versions[0] +var BuildNumber string +var BuildDate string +var BuildHash string +var BuildHashEnterprise string +var BuildEnterpriseReady string +var versionsWithoutHotFixes []string + +func init() { + versionsWithoutHotFixes = make([]string, 0, len(versions)) + seen := make(map[string]string) + + for _, version := range versions { + major, minor, _ := SplitVersion(version) + verStr := fmt.Sprintf("%v.%v.0", major, minor) + + if seen[verStr] == "" { + 
versionsWithoutHotFixes = append(versionsWithoutHotFixes, verStr) + seen[verStr] = verStr + } + } +} + +func SplitVersion(version string) (int64, int64, int64) { + parts := strings.Split(version, ".") + + major := int64(0) + minor := int64(0) + patch := int64(0) + + if len(parts) > 0 { + major, _ = strconv.ParseInt(parts[0], 10, 64) + } + + if len(parts) > 1 { + minor, _ = strconv.ParseInt(parts[1], 10, 64) + } + + if len(parts) > 2 { + patch, _ = strconv.ParseInt(parts[2], 10, 64) + } + + return major, minor, patch +} + +func GetPreviousVersion(version string) string { + verMajor, verMinor, _ := SplitVersion(version) + verStr := fmt.Sprintf("%v.%v.0", verMajor, verMinor) + + for index, v := range versionsWithoutHotFixes { + if v == verStr && len(versionsWithoutHotFixes) > index+1 { + return versionsWithoutHotFixes[index+1] + } + } + + return "" +} + +func IsCurrentVersion(versionToCheck string) bool { + currentMajor, currentMinor, _ := SplitVersion(CurrentVersion) + toCheckMajor, toCheckMinor, _ := SplitVersion(versionToCheck) + + if toCheckMajor == currentMajor && toCheckMinor == currentMinor { + return true + } + return false +} + +func IsPreviousVersionsSupported(versionToCheck string) bool { + toCheckMajor, toCheckMinor, _ := SplitVersion(versionToCheck) + versionToCheckStr := fmt.Sprintf("%v.%v.0", toCheckMajor, toCheckMinor) + + // Current Supported + if versionsWithoutHotFixes[0] == versionToCheckStr { + return true + } + + // Current - 1 Supported + if versionsWithoutHotFixes[1] == versionToCheckStr { + return true + } + + // Current - 2 Supported + if versionsWithoutHotFixes[2] == versionToCheckStr { + return true + } + + // Current - 3 Supported + if versionsWithoutHotFixes[3] == versionToCheckStr { + return true + } + + return false +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/websocket_client.go b/vendor/github.com/mattermost/mattermost/server/public/model/websocket_client.go new file mode 100644 index 00000000..559218e2 
--- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/websocket_client.go @@ -0,0 +1,392 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "sync/atomic" + "time" + + "github.com/mattermost/mattermost/server/public/shared/mlog" + + "github.com/gorilla/websocket" + "github.com/vmihailenco/msgpack/v5" +) + +const ( + SocketMaxMessageSizeKb = 8 * 1024 // 8KB + PingTimeoutBufferSeconds = 5 +) + +type msgType int + +const ( + msgTypeJSON msgType = iota + 1 + msgTypePong + msgTypeBinary +) + +type writeMessage struct { + msgType msgType + data any +} + +const avgReadMsgSizeBytes = 1024 + +// WebSocketClient stores the necessary information required to +// communicate with a WebSocket endpoint. +// A client must read from PingTimeoutChannel, EventChannel and ResponseChannel to prevent +// deadlocks from occurring in the program. +type WebSocketClient struct { + URL string // The location of the server like "ws://localhost:8065" + APIURL string // The API location of the server like "ws://localhost:8065/api/v3" + ConnectURL string // The WebSocket URL to connect to like "ws://localhost:8065/api/v3/path/to/websocket" + Conn *websocket.Conn // The WebSocket connection + AuthToken string // The token used to open the WebSocket connection + Sequence int64 // The ever-incrementing sequence attached to each WebSocket action + PingTimeoutChannel chan bool // The channel used to signal ping timeouts + EventChannel chan *WebSocketEvent // The channel used to receive various events pushed from the server. 
For example: typing, posted + ResponseChannel chan *WebSocketResponse // The channel used to receive responses for requests made to the server + ListenError *AppError // A field that is set if there was an abnormal closure of the WebSocket connection + writeChan chan writeMessage + + pingTimeoutTimer *time.Timer + quitPingWatchdog chan struct{} + + quitWriterChan chan struct{} + resetTimerChan chan struct{} + closed int32 +} + +// NewWebSocketClient constructs a new WebSocket client with convenience +// methods for talking to the server. +func NewWebSocketClient(url, authToken string) (*WebSocketClient, error) { + return NewWebSocketClientWithDialer(websocket.DefaultDialer, url, authToken) +} + +func NewReliableWebSocketClientWithDialer(dialer *websocket.Dialer, url, authToken, connID string, seqNo int, withAuthHeader bool) (*WebSocketClient, error) { + connectURL := url + APIURLSuffix + "/websocket" + fmt.Sprintf("?connection_id=%s&sequence_number=%d", connID, seqNo) + var header http.Header + if withAuthHeader { + header = http.Header{ + "Authorization": []string{"Bearer " + authToken}, + } + } + + return makeClient(dialer, url, connectURL, authToken, header) +} + +// NewWebSocketClientWithDialer constructs a new WebSocket client with convenience +// methods for talking to the server using a custom dialer. 
+func NewWebSocketClientWithDialer(dialer *websocket.Dialer, url, authToken string) (*WebSocketClient, error) { + return makeClient(dialer, url, url+APIURLSuffix+"/websocket", authToken, nil) +} + +func makeClient(dialer *websocket.Dialer, url, connectURL, authToken string, header http.Header) (*WebSocketClient, error) { + conn, _, err := dialer.Dial(connectURL, header) + if err != nil { + return nil, NewAppError("NewWebSocketClient", "model.websocket_client.connect_fail.app_error", nil, "", http.StatusInternalServerError).Wrap(err) + } + + client := &WebSocketClient{ + URL: url, + APIURL: url + APIURLSuffix, + ConnectURL: connectURL, + Conn: conn, + AuthToken: authToken, + Sequence: 1, + PingTimeoutChannel: make(chan bool, 1), + EventChannel: make(chan *WebSocketEvent, 100), + ResponseChannel: make(chan *WebSocketResponse, 100), + writeChan: make(chan writeMessage), + quitPingWatchdog: make(chan struct{}), + quitWriterChan: make(chan struct{}), + resetTimerChan: make(chan struct{}), + } + + client.configurePingHandling() + go client.writer() + + client.SendMessage(string(WebsocketAuthenticationChallenge), map[string]any{"token": authToken}) + + return client, nil +} + +// NewWebSocketClient4 constructs a new WebSocket client with convenience +// methods for talking to the server. Uses the v4 endpoint. +func NewWebSocketClient4(url, authToken string) (*WebSocketClient, error) { + return NewWebSocketClient4WithDialer(websocket.DefaultDialer, url, authToken) +} + +// NewWebSocketClient4WithDialer constructs a new WebSocket client with convenience +// methods for talking to the server using a custom dialer. Uses the v4 endpoint. +func NewWebSocketClient4WithDialer(dialer *websocket.Dialer, url, authToken string) (*WebSocketClient, error) { + return NewWebSocketClientWithDialer(dialer, url, authToken) +} + +// Connect creates a websocket connection with the given ConnectURL. +// This is racy and error-prone should not be used. 
Use any of the New* functions to create a websocket. +func (wsc *WebSocketClient) Connect() *AppError { + return wsc.ConnectWithDialer(websocket.DefaultDialer) +} + +// ConnectWithDialer creates a websocket connection with the given ConnectURL using the dialer. +// This is racy and error-prone and should not be used. Use any of the New* functions to create a websocket. +func (wsc *WebSocketClient) ConnectWithDialer(dialer *websocket.Dialer) *AppError { + var err error + wsc.Conn, _, err = dialer.Dial(wsc.ConnectURL, nil) + if err != nil { + return NewAppError("Connect", "model.websocket_client.connect_fail.app_error", nil, "", http.StatusInternalServerError).Wrap(err) + } + // Super racy and should not be done anyways. + // All of this needs to be redesigned for v6. + wsc.configurePingHandling() + // If it has been closed before, we just restart the writer. + if atomic.CompareAndSwapInt32(&wsc.closed, 1, 0) { + wsc.writeChan = make(chan writeMessage) + wsc.quitWriterChan = make(chan struct{}) + go wsc.writer() + wsc.resetTimerChan = make(chan struct{}) + wsc.quitPingWatchdog = make(chan struct{}) + } + + wsc.EventChannel = make(chan *WebSocketEvent, 100) + wsc.ResponseChannel = make(chan *WebSocketResponse, 100) + + wsc.SendMessage(string(WebsocketAuthenticationChallenge), map[string]any{"token": wsc.AuthToken}) + + return nil +} + +// Close closes the websocket client. It is recommended that a closed client should not be +// reused again. Rather a new client should be created anew. +func (wsc *WebSocketClient) Close() { + // CAS to 1 and proceed. Return if already 1. + if !atomic.CompareAndSwapInt32(&wsc.closed, 0, 1) { + return + } + wsc.quitWriterChan <- struct{}{} + close(wsc.writeChan) + // We close the connection, which breaks the reader loop. + // Then we let the defer block in the reader do further cleanup. 
+ wsc.Conn.Close() +} + +// TODO: un-export the Conn so that Write methods go through the writer +func (wsc *WebSocketClient) writer() { + for { + select { + case msg := <-wsc.writeChan: + switch msg.msgType { + case msgTypeJSON: + wsc.Conn.WriteJSON(msg.data) + case msgTypeBinary: + if data, ok := msg.data.([]byte); ok { + wsc.Conn.WriteMessage(websocket.BinaryMessage, data) + } + case msgTypePong: + wsc.Conn.WriteMessage(websocket.PongMessage, []byte{}) + } + case <-wsc.quitWriterChan: + return + } + } +} + +// Listen starts the read loop of the websocket client. +func (wsc *WebSocketClient) Listen() { + // This loop can exit in 2 conditions: + // 1. Either the connection breaks naturally. + // 2. Close was explicitly called, which closes the connection manually. + // + // Due to the way the API is written, there is a requirement that a client may NOT + // call Listen at all and can still call Close and Connect. + // Therefore, we let the cleanup of the reader stuff rely on closing the connection + // and then we do the cleanup in the defer block. + // + // First, we close some channels and then CAS to 1 and proceed to close the writer chan also. + // This is needed because then the defer clause does not double-close the writer when (2) happens. + // But if (1) happens, we set the closed bit, and close the rest of the stuff. + go func() { + defer func() { + close(wsc.EventChannel) + close(wsc.ResponseChannel) + close(wsc.quitPingWatchdog) + close(wsc.resetTimerChan) + // We CAS to 1 and proceed. + if !atomic.CompareAndSwapInt32(&wsc.closed, 0, 1) { + return + } + wsc.quitWriterChan <- struct{}{} + close(wsc.writeChan) + wsc.Conn.Close() // This can most likely be removed. Needs to be checked. + }() + + var buf bytes.Buffer + buf.Grow(avgReadMsgSizeBytes) + + for { + // Reset buffer. 
+ buf.Reset() + _, r, err := wsc.Conn.NextReader() + if err != nil { + if !websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseNoStatusReceived) { + wsc.ListenError = NewAppError("NewWebSocketClient", "model.websocket_client.connect_fail.app_error", nil, "", http.StatusInternalServerError).Wrap(err) + } + return + } + // Use pre-allocated buffer. + _, err = buf.ReadFrom(r) + if err != nil { + // This should use a different error ID, but en.json is not imported anyways. + // It's a different bug altogether but we let it be for now. + // See MM-24520. + wsc.ListenError = NewAppError("NewWebSocketClient", "model.websocket_client.connect_fail.app_error", nil, "", http.StatusInternalServerError).Wrap(err) + return + } + + event, jsonErr := WebSocketEventFromJSON(bytes.NewReader(buf.Bytes())) + if jsonErr != nil { + mlog.Warn("Failed to decode from JSON", mlog.Err(jsonErr)) + continue + } + if event.IsValid() { + wsc.EventChannel <- event + continue + } + + var response WebSocketResponse + if err := json.Unmarshal(buf.Bytes(), &response); err == nil && response.IsValid() { + wsc.ResponseChannel <- &response + continue + } + } + }() +} + +func (wsc *WebSocketClient) SendMessage(action string, data map[string]any) { + req := &WebSocketRequest{} + req.Seq = wsc.Sequence + req.Action = action + req.Data = data + + wsc.Sequence++ + wsc.writeChan <- writeMessage{ + msgType: msgTypeJSON, + data: req, + } +} + +func (wsc *WebSocketClient) SendBinaryMessage(action string, data map[string]any) error { + req := &WebSocketRequest{} + req.Seq = wsc.Sequence + req.Action = action + req.Data = data + + binaryData, err := msgpack.Marshal(req) + if err != nil { + return fmt.Errorf("failed to marshal request to msgpack: %w", err) + } + + wsc.Sequence++ + wsc.writeChan <- writeMessage{ + msgType: msgTypeBinary, + data: binaryData, + } + + return nil +} + +// UserTyping will push a user_typing event out to all connected users +// who are in the specified channel +func 
(wsc *WebSocketClient) UserTyping(channelId, parentId string) { + data := map[string]any{ + "channel_id": channelId, + "parent_id": parentId, + } + + wsc.SendMessage("user_typing", data) +} + +// GetStatuses will return a map of string statuses using user id as the key +func (wsc *WebSocketClient) GetStatuses() { + wsc.SendMessage("get_statuses", nil) +} + +// GetStatusesByIds will fetch certain user statuses based on ids and return +// a map of string statuses using user id as the key +func (wsc *WebSocketClient) GetStatusesByIds(userIds []string) { + data := map[string]any{ + "user_ids": userIds, + } + wsc.SendMessage("get_statuses_by_ids", data) +} + +// UpdateActiveChannel sets the current channel that the user is viewing. +func (wsc *WebSocketClient) UpdateActiveChannel(channelID string) { + data := map[string]any{ + "channel_id": channelID, + } + wsc.SendMessage(string(WebsocketPresenceIndicator), data) +} + +// UpdateActiveTeam sets the current team that the user is in. +func (wsc *WebSocketClient) UpdateActiveTeam(teamID string) { + data := map[string]any{ + "team_id": teamID, + } + wsc.SendMessage(string(WebsocketPresenceIndicator), data) +} + +// UpdateActiveThread sets the channel id of the current thread that the user is in. 
+func (wsc *WebSocketClient) UpdateActiveThread(isThreadView bool, channelID string) { + data := map[string]any{ + "thread_channel_id": channelID, + "is_thread_view": isThreadView, + } + wsc.SendMessage(string(WebsocketPresenceIndicator), data) +} + +func (wsc *WebSocketClient) configurePingHandling() { + wsc.Conn.SetPingHandler(wsc.pingHandler) + wsc.pingTimeoutTimer = time.NewTimer(time.Second * (60 + PingTimeoutBufferSeconds)) + go wsc.pingWatchdog() +} + +func (wsc *WebSocketClient) pingHandler(appData string) error { + if atomic.LoadInt32(&wsc.closed) == 1 { + return nil + } + wsc.resetTimerChan <- struct{}{} + wsc.writeChan <- writeMessage{ + msgType: msgTypePong, + } + return nil +} + +// pingWatchdog is used to send values to the PingTimeoutChannel whenever a timeout occurs. +// We use the resetTimerChan from the pingHandler to pass the signal, and then reset the timer +// after draining it. And if the timer naturally expires, we also extend it to prevent it from +// being deadlocked when the resetTimerChan case runs. Because timer.Stop would return false, +// and the code would be forever stuck trying to read from C. +func (wsc *WebSocketClient) pingWatchdog() { + for { + select { + case <-wsc.resetTimerChan: + if !wsc.pingTimeoutTimer.Stop() { + <-wsc.pingTimeoutTimer.C + } + wsc.pingTimeoutTimer.Reset(time.Second * (60 + PingTimeoutBufferSeconds)) + + case <-wsc.pingTimeoutTimer.C: + wsc.PingTimeoutChannel <- true + wsc.pingTimeoutTimer.Reset(time.Second * (60 + PingTimeoutBufferSeconds)) + case <-wsc.quitPingWatchdog: + return + } + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/websocket_message.go b/vendor/github.com/mattermost/mattermost/server/public/model/websocket_message.go new file mode 100644 index 00000000..9aea36af --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/websocket_message.go @@ -0,0 +1,452 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. 
+// See LICENSE.txt for license information. + +package model + +import ( + "encoding/json" + "io" + "maps" + "strconv" +) + +type WebsocketEventType string + +const ( + WebsocketEventTyping WebsocketEventType = "typing" + WebsocketEventPosted WebsocketEventType = "posted" + WebsocketEventPostEdited WebsocketEventType = "post_edited" + WebsocketEventPostDeleted WebsocketEventType = "post_deleted" + WebsocketEventPostUnread WebsocketEventType = "post_unread" + WebsocketEventChannelConverted WebsocketEventType = "channel_converted" + WebsocketEventChannelCreated WebsocketEventType = "channel_created" + WebsocketEventChannelDeleted WebsocketEventType = "channel_deleted" + WebsocketEventChannelRestored WebsocketEventType = "channel_restored" + WebsocketEventChannelUpdated WebsocketEventType = "channel_updated" + WebsocketEventChannelMemberUpdated WebsocketEventType = "channel_member_updated" + WebsocketEventChannelSchemeUpdated WebsocketEventType = "channel_scheme_updated" + WebsocketEventDirectAdded WebsocketEventType = "direct_added" + WebsocketEventGroupAdded WebsocketEventType = "group_added" + WebsocketEventNewUser WebsocketEventType = "new_user" + WebsocketEventAddedToTeam WebsocketEventType = "added_to_team" + WebsocketEventLeaveTeam WebsocketEventType = "leave_team" + WebsocketEventUpdateTeam WebsocketEventType = "update_team" + WebsocketEventDeleteTeam WebsocketEventType = "delete_team" + WebsocketEventRestoreTeam WebsocketEventType = "restore_team" + WebsocketEventUpdateTeamScheme WebsocketEventType = "update_team_scheme" + WebsocketEventUserAdded WebsocketEventType = "user_added" + WebsocketEventUserUpdated WebsocketEventType = "user_updated" + WebsocketEventUserRoleUpdated WebsocketEventType = "user_role_updated" + WebsocketEventMemberroleUpdated WebsocketEventType = "memberrole_updated" + WebsocketEventUserRemoved WebsocketEventType = "user_removed" + WebsocketEventPreferenceChanged WebsocketEventType = "preference_changed" + 
WebsocketEventPreferencesChanged WebsocketEventType = "preferences_changed" + WebsocketEventPreferencesDeleted WebsocketEventType = "preferences_deleted" + WebsocketEventEphemeralMessage WebsocketEventType = "ephemeral_message" + WebsocketEventStatusChange WebsocketEventType = "status_change" + WebsocketEventHello WebsocketEventType = "hello" + WebsocketAuthenticationChallenge WebsocketEventType = "authentication_challenge" + WebsocketEventReactionAdded WebsocketEventType = "reaction_added" + WebsocketEventReactionRemoved WebsocketEventType = "reaction_removed" + WebsocketEventResponse WebsocketEventType = "response" + WebsocketEventEmojiAdded WebsocketEventType = "emoji_added" + WebsocketEventChannelViewed WebsocketEventType = "channel_viewed" + WebsocketEventMultipleChannelsViewed WebsocketEventType = "multiple_channels_viewed" + WebsocketEventPluginStatusesChanged WebsocketEventType = "plugin_statuses_changed" + WebsocketEventPluginEnabled WebsocketEventType = "plugin_enabled" + WebsocketEventPluginDisabled WebsocketEventType = "plugin_disabled" + WebsocketEventRoleUpdated WebsocketEventType = "role_updated" + WebsocketEventLicenseChanged WebsocketEventType = "license_changed" + WebsocketEventConfigChanged WebsocketEventType = "config_changed" + WebsocketEventOpenDialog WebsocketEventType = "open_dialog" + WebsocketEventGuestsDeactivated WebsocketEventType = "guests_deactivated" + WebsocketEventUserActivationStatusChange WebsocketEventType = "user_activation_status_change" + WebsocketEventReceivedGroup WebsocketEventType = "received_group" + WebsocketEventReceivedGroupAssociatedToTeam WebsocketEventType = "received_group_associated_to_team" + WebsocketEventReceivedGroupNotAssociatedToTeam WebsocketEventType = "received_group_not_associated_to_team" + WebsocketEventReceivedGroupAssociatedToChannel WebsocketEventType = "received_group_associated_to_channel" + WebsocketEventReceivedGroupNotAssociatedToChannel WebsocketEventType = 
"received_group_not_associated_to_channel" + WebsocketEventGroupMemberDelete WebsocketEventType = "group_member_deleted" + WebsocketEventGroupMemberAdd WebsocketEventType = "group_member_add" + WebsocketEventSidebarCategoryCreated WebsocketEventType = "sidebar_category_created" + WebsocketEventSidebarCategoryUpdated WebsocketEventType = "sidebar_category_updated" + WebsocketEventSidebarCategoryDeleted WebsocketEventType = "sidebar_category_deleted" + WebsocketEventSidebarCategoryOrderUpdated WebsocketEventType = "sidebar_category_order_updated" + WebsocketEventCloudPaymentStatusUpdated WebsocketEventType = "cloud_payment_status_updated" + WebsocketEventCloudSubscriptionChanged WebsocketEventType = "cloud_subscription_changed" + WebsocketEventThreadUpdated WebsocketEventType = "thread_updated" + WebsocketEventThreadFollowChanged WebsocketEventType = "thread_follow_changed" + WebsocketEventThreadReadChanged WebsocketEventType = "thread_read_changed" + WebsocketFirstAdminVisitMarketplaceStatusReceived WebsocketEventType = "first_admin_visit_marketplace_status_received" + WebsocketEventDraftCreated WebsocketEventType = "draft_created" + WebsocketEventDraftUpdated WebsocketEventType = "draft_updated" + WebsocketEventDraftDeleted WebsocketEventType = "draft_deleted" + WebsocketEventAcknowledgementAdded WebsocketEventType = "post_acknowledgement_added" + WebsocketEventAcknowledgementRemoved WebsocketEventType = "post_acknowledgement_removed" + WebsocketEventPersistentNotificationTriggered WebsocketEventType = "persistent_notification_triggered" + WebsocketEventHostedCustomerSignupProgressUpdated WebsocketEventType = "hosted_customer_signup_progress_updated" + WebsocketEventChannelBookmarkCreated WebsocketEventType = "channel_bookmark_created" + WebsocketEventChannelBookmarkUpdated WebsocketEventType = "channel_bookmark_updated" + WebsocketEventChannelBookmarkDeleted WebsocketEventType = "channel_bookmark_deleted" + WebsocketEventChannelBookmarkSorted WebsocketEventType = 
"channel_bookmark_sorted" + WebsocketPresenceIndicator WebsocketEventType = "presence" + WebsocketPostedNotifyAck WebsocketEventType = "posted_notify_ack" + WebsocketScheduledPostCreated WebsocketEventType = "scheduled_post_created" + WebsocketScheduledPostUpdated WebsocketEventType = "scheduled_post_updated" + WebsocketScheduledPostDeleted WebsocketEventType = "scheduled_post_deleted" + WebsocketEventCPAFieldCreated WebsocketEventType = "custom_profile_attributes_field_created" + WebsocketEventCPAFieldUpdated WebsocketEventType = "custom_profile_attributes_field_updated" + WebsocketEventCPAFieldDeleted WebsocketEventType = "custom_profile_attributes_field_deleted" + WebsocketEventCPAValuesUpdated WebsocketEventType = "custom_profile_attributes_values_updated" + WebsocketContentFlaggingReportValueUpdated WebsocketEventType = "content_flagging_report_value_updated" + + WebSocketMsgTypeResponse = "response" + WebSocketMsgTypeEvent = "event" +) + +type ActiveQueueItem struct { + Type string `json:"type"` // websocket event or websocket response + Buf json.RawMessage `json:"buf"` +} + +type WSQueues struct { + ActiveQ []ActiveQueueItem `json:"active_queue"` // websocketEvent|websocketResponse + DeadQ []json.RawMessage `json:"dead_queue"` // websocketEvent + ReuseCount int `json:"reuse_count"` +} + +type WebSocketMessage interface { + ToJSON() ([]byte, error) + IsValid() bool + EventType() WebsocketEventType +} + +type WebsocketBroadcast struct { + OmitUsers map[string]bool `json:"omit_users"` // broadcast is omitted for users listed here + UserId string `json:"user_id"` // broadcast only occurs for this user + ChannelId string `json:"channel_id"` // broadcast only occurs for users in this channel + TeamId string `json:"team_id"` // broadcast only occurs for users in this team + ConnectionId string `json:"connection_id"` // broadcast only occurs for this connection + OmitConnectionId string `json:"omit_connection_id"` // broadcast is omitted for this connection + 
ContainsSanitizedData bool `json:"contains_sanitized_data,omitempty"` // broadcast only occurs for non-sysadmins + ContainsSensitiveData bool `json:"contains_sensitive_data,omitempty"` // broadcast only occurs for sysadmins + // ReliableClusterSend indicates whether or not the message should + // be sent through the cluster using the reliable, TCP backed channel. + ReliableClusterSend bool `json:"-"` + + // BroadcastHooks is a slice of hooks IDs used to process events before sending them on individual connections. The + // IDs should be understood by the WebSocket code. + // + // This field should never be sent to the client. + BroadcastHooks []string `json:"broadcast_hooks,omitempty"` + // BroadcastHookArgs is a slice of named arguments for each hook invocation. The index of each entry corresponds to + // the index of a hook ID in BroadcastHooks + // + // This field should never be sent to the client. + BroadcastHookArgs []map[string]any `json:"broadcast_hook_args,omitempty"` +} + +func (wb *WebsocketBroadcast) copy() *WebsocketBroadcast { + if wb == nil { + return nil + } + + var c WebsocketBroadcast + if wb.OmitUsers != nil { + c.OmitUsers = make(map[string]bool, len(wb.OmitUsers)) + maps.Copy(c.OmitUsers, wb.OmitUsers) + } + c.UserId = wb.UserId + c.ChannelId = wb.ChannelId + c.TeamId = wb.TeamId + c.OmitConnectionId = wb.OmitConnectionId + c.ContainsSanitizedData = wb.ContainsSanitizedData + c.ContainsSensitiveData = wb.ContainsSensitiveData + c.BroadcastHooks = wb.BroadcastHooks + c.BroadcastHookArgs = wb.BroadcastHookArgs + + return &c +} + +func (wb *WebsocketBroadcast) AddHook(hookID string, hookArgs map[string]any) { + wb.BroadcastHooks = append(wb.BroadcastHooks, hookID) + wb.BroadcastHookArgs = append(wb.BroadcastHookArgs, hookArgs) +} + +type precomputedWebSocketEventJSON struct { + Event json.RawMessage + Data json.RawMessage + Broadcast json.RawMessage +} + +func (p *precomputedWebSocketEventJSON) copy() *precomputedWebSocketEventJSON { + if p == nil 
{ + return nil + } + + var c precomputedWebSocketEventJSON + + if p.Event != nil { + c.Event = make([]byte, len(p.Event)) + copy(c.Event, p.Event) + } + + if p.Data != nil { + c.Data = make([]byte, len(p.Data)) + copy(c.Data, p.Data) + } + + if p.Broadcast != nil { + c.Broadcast = make([]byte, len(p.Broadcast)) + copy(c.Broadcast, p.Broadcast) + } + + return &c +} + +// webSocketEventJSON mirrors WebSocketEvent to make some of its unexported fields serializable +type webSocketEventJSON struct { + Event WebsocketEventType `json:"event"` + Data map[string]any `json:"data"` + Broadcast *WebsocketBroadcast `json:"broadcast"` + Sequence int64 `json:"seq"` +} + +type WebSocketEvent struct { + event WebsocketEventType + data map[string]any + broadcast *WebsocketBroadcast + sequence int64 + precomputedJSON *precomputedWebSocketEventJSON +} + +// PrecomputeJSON precomputes and stores the serialized JSON for all fields other than Sequence. +// This makes ToJSON much more efficient when sending the same event to multiple connections. +func (ev *WebSocketEvent) PrecomputeJSON() *WebSocketEvent { + evCopy := ev.Copy() + event, _ := json.Marshal(evCopy.event) + data, _ := json.Marshal(evCopy.data) + broadcast, _ := json.Marshal(evCopy.broadcast) + evCopy.precomputedJSON = &precomputedWebSocketEventJSON{ + Event: json.RawMessage(event), + Data: json.RawMessage(data), + Broadcast: json.RawMessage(broadcast), + } + return evCopy +} + +func (ev *WebSocketEvent) RemovePrecomputedJSON() *WebSocketEvent { + evCopy := ev.DeepCopy() + evCopy.precomputedJSON = nil + return evCopy +} + +// WithoutBroadcastHooks gets the broadcast hook information from a WebSocketEvent and returns the event without that. +// If the event has broadcast hooks, a copy of the event is returned. Otherwise, the original event is returned. This +// is intended to be called before the event is sent to the client. 
+func (ev *WebSocketEvent) WithoutBroadcastHooks() (*WebSocketEvent, []string, []map[string]any) { + hooks := ev.broadcast.BroadcastHooks + hookArgs := ev.broadcast.BroadcastHookArgs + + if len(hooks) == 0 && len(hookArgs) == 0 { + return ev, hooks, hookArgs + } + + evCopy := ev.Copy() + evCopy.broadcast = ev.broadcast.copy() + + evCopy.broadcast.BroadcastHooks = nil + evCopy.broadcast.BroadcastHookArgs = nil + + return evCopy, hooks, hookArgs +} + +func (ev *WebSocketEvent) Add(key string, value any) { + ev.data[key] = value +} + +func NewWebSocketEvent(event WebsocketEventType, teamId, channelId, userId string, omitUsers map[string]bool, omitConnectionId string) *WebSocketEvent { + return &WebSocketEvent{ + event: event, + data: make(map[string]any), + broadcast: &WebsocketBroadcast{ + TeamId: teamId, + ChannelId: channelId, + UserId: userId, + OmitUsers: omitUsers, + OmitConnectionId: omitConnectionId}, + } +} + +func (ev *WebSocketEvent) Copy() *WebSocketEvent { + evCopy := &WebSocketEvent{ + event: ev.event, + data: ev.data, + broadcast: ev.broadcast, + sequence: ev.sequence, + precomputedJSON: ev.precomputedJSON, + } + return evCopy +} + +func (ev *WebSocketEvent) DeepCopy() *WebSocketEvent { + evCopy := &WebSocketEvent{ + event: ev.event, + data: maps.Clone(ev.data), + broadcast: ev.broadcast.copy(), + sequence: ev.sequence, + precomputedJSON: ev.precomputedJSON.copy(), + } + return evCopy +} + +func (ev *WebSocketEvent) GetData() map[string]any { + return ev.data +} + +func (ev *WebSocketEvent) GetBroadcast() *WebsocketBroadcast { + return ev.broadcast +} + +func (ev *WebSocketEvent) GetSequence() int64 { + return ev.sequence +} + +func (ev *WebSocketEvent) SetEvent(event WebsocketEventType) *WebSocketEvent { + evCopy := ev.Copy() + evCopy.event = event + return evCopy +} + +func (ev *WebSocketEvent) SetData(data map[string]any) *WebSocketEvent { + evCopy := ev.Copy() + evCopy.data = data + return evCopy +} + +func (ev *WebSocketEvent) 
SetBroadcast(broadcast *WebsocketBroadcast) *WebSocketEvent { + evCopy := ev.Copy() + evCopy.broadcast = broadcast + return evCopy +} + +func (ev *WebSocketEvent) SetSequence(seq int64) *WebSocketEvent { + evCopy := ev.Copy() + evCopy.sequence = seq + return evCopy +} + +func (ev *WebSocketEvent) IsValid() bool { + return ev.event != "" +} + +func (ev *WebSocketEvent) EventType() WebsocketEventType { + return ev.event +} + +func (ev *WebSocketEvent) ToJSON() ([]byte, error) { + if ev.precomputedJSON != nil { + return ev.precomputedJSONBuf(), nil + } + return json.Marshal(webSocketEventJSON{ + ev.event, + ev.data, + ev.broadcast, + ev.sequence, + }) +} + +// Encode encodes the event to the given encoder. +func (ev *WebSocketEvent) Encode(enc *json.Encoder, buf io.Writer) error { + if ev.precomputedJSON != nil { + _, err := buf.Write(ev.precomputedJSONBuf()) + return err + } + + return enc.Encode(webSocketEventJSON{ + ev.event, + ev.data, + ev.broadcast, + ev.sequence, + }) +} + +// We write optimal code here sacrificing readability for +// performance. +func (ev *WebSocketEvent) precomputedJSONBuf() []byte { + return []byte(`{"event": ` + + string(ev.precomputedJSON.Event) + + `, "data": ` + + string(ev.precomputedJSON.Data) + + `, "broadcast": ` + + string(ev.precomputedJSON.Broadcast) + + `, "seq": ` + + strconv.Itoa(int(ev.sequence)) + + `}`) +} + +func WebSocketEventFromJSON(data io.Reader) (*WebSocketEvent, error) { + var ev WebSocketEvent + var o webSocketEventJSON + if err := json.NewDecoder(data).Decode(&o); err != nil { + return nil, err + } + ev.event = o.Event + if u, ok := o.Data["user"]; ok { + // We need to convert to and from JSON again + // because the user is in the form of a map[string]any. 
+ buf, err := json.Marshal(u) + if err != nil { + return nil, err + } + + var user User + if err = json.Unmarshal(buf, &user); err != nil { + return nil, err + } + o.Data["user"] = &user + } + ev.data = o.Data + ev.broadcast = o.Broadcast + ev.sequence = o.Sequence + return &ev, nil +} + +// WebSocketResponse represents a response received through the WebSocket +// for a request made to the server. This is available through the ResponseChannel +// channel in WebSocketClient. +type WebSocketResponse struct { + Status string `json:"status"` // The status of the response. For example: OK, FAIL. + SeqReply int64 `json:"seq_reply,omitempty"` // A counter which is incremented for every response sent. + Data map[string]any `json:"data,omitempty"` // The data contained in the response. + Error *AppError `json:"error,omitempty"` // A field that is set if any error has occurred. +} + +func (m *WebSocketResponse) Add(key string, value any) { + m.Data[key] = value +} + +func NewWebSocketResponse(status string, seqReply int64, data map[string]any) *WebSocketResponse { + return &WebSocketResponse{Status: status, SeqReply: seqReply, Data: data} +} + +func NewWebSocketError(seqReply int64, err *AppError) *WebSocketResponse { + return &WebSocketResponse{Status: StatusFail, SeqReply: seqReply, Error: err} +} + +func (m *WebSocketResponse) IsValid() bool { + return m.Status != "" +} + +func (m *WebSocketResponse) EventType() WebsocketEventType { + return WebsocketEventResponse +} + +func (m *WebSocketResponse) ToJSON() ([]byte, error) { + return json.Marshal(m) +} + +func WebSocketResponseFromJSON(data io.Reader) (*WebSocketResponse, error) { + var o *WebSocketResponse + return o, json.NewDecoder(data).Decode(&o) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/websocket_request.go b/vendor/github.com/mattermost/mattermost/server/public/model/websocket_request.go new file mode 100644 index 00000000..12671f67 --- /dev/null +++ 
b/vendor/github.com/mattermost/mattermost/server/public/model/websocket_request.go @@ -0,0 +1,41 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +import ( + "github.com/mattermost/mattermost/server/public/shared/i18n" + + "github.com/vmihailenco/msgpack/v5" +) + +const ( + WebSocketRemoteAddr = "remote_addr" + WebSocketXForwardedFor = "x_forwarded_for" +) + +// WebSocketRequest represents a request made to the server through a websocket. +type WebSocketRequest struct { + // Client-provided fields + Seq int64 `json:"seq" msgpack:"seq"` // A counter which is incremented for every request made. + Action string `json:"action" msgpack:"action"` // The action to perform for a request. For example: get_statuses, user_typing. + Data map[string]any `json:"data" msgpack:"data"` // The metadata for an action. + + // Server-provided fields + Session Session `json:"-" msgpack:"-"` + T i18n.TranslateFunc `json:"-" msgpack:"-"` + Locale string `json:"-" msgpack:"-"` +} + +func (o *WebSocketRequest) Clone() (*WebSocketRequest, error) { + buf, err := msgpack.Marshal(o) + if err != nil { + return nil, err + } + var ret WebSocketRequest + err = msgpack.Unmarshal(buf, &ret) + if err != nil { + return nil, err + } + return &ret, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/model/wrangler.go b/vendor/github.com/mattermost/mattermost/server/public/model/wrangler.go new file mode 100644 index 00000000..6a7261e6 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/model/wrangler.go @@ -0,0 +1,33 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package model + +// WranglerPostList provides a list of posts along with metadata about those +// posts. 
+type WranglerPostList struct { + Posts []*Post + ThreadUserIDs []string + EarlistPostTimestamp int64 + LatestPostTimestamp int64 + FileAttachmentCount int64 +} + +// NumPosts returns the number of posts in a post list. +func (wpl *WranglerPostList) NumPosts() int { + return len(wpl.Posts) +} + +// RootPost returns the root post in a post list. +func (wpl *WranglerPostList) RootPost() *Post { + if wpl.NumPosts() < 1 { + return nil + } + + return wpl.Posts[0] +} + +// ContainsFileAttachments returns if the post list contains any file attachments. +func (wpl *WranglerPostList) ContainsFileAttachments() bool { + return wpl.FileAttachmentCount != 0 +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/.mockery.yaml b/vendor/github.com/mattermost/mattermost/server/public/plugin/.mockery.yaml new file mode 100644 index 00000000..6f162d9d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/.mockery.yaml @@ -0,0 +1,13 @@ +with-expecter: false +packages: + github.com/mattermost/mattermost/server/public/plugin: + config: + dir: "public/plugin/plugintest" + filename: "{{.InterfaceNameLower}}.go" + mockname: "{{.InterfaceName}}" + outpkg: "plugintest" + note: "Regenerate this file using `make plugin-mocks`." + interfaces: + API: + Hooks: + Driver: diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/api.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/api.go new file mode 100644 index 00000000..5f1c2eae --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/api.go @@ -0,0 +1,1573 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package plugin + +import ( + "io" + "net/http" + + plugin "github.com/hashicorp/go-plugin" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// The API can be used to retrieve data or perform actions on behalf of the plugin. Most methods +// have direct counterparts in the REST API and very similar behavior. +// +// Plugins obtain access to the API by embedding MattermostPlugin and accessing the API member +// directly. +type API interface { + // LoadPluginConfiguration loads the plugin's configuration. dest should be a pointer to a + // struct that the configuration JSON can be unmarshalled to. + // + // @tag Plugin + // Minimum server version: 5.2 + LoadPluginConfiguration(dest any) error + + // RegisterCommand registers a custom slash command. When the command is triggered, your plugin + // can fulfill it via the ExecuteCommand hook. + // + // @tag Command + // Minimum server version: 5.2 + RegisterCommand(command *model.Command) error + + // UnregisterCommand unregisters a command previously register via RegisterCommand. + // + // @tag Command + // Minimum server version: 5.2 + UnregisterCommand(teamID, trigger string) error + + // ExecuteSlashCommand executes a slash command with the given parameters. + // + // @tag Command + // Minimum server version: 5.26 + ExecuteSlashCommand(commandArgs *model.CommandArgs) (*model.CommandResponse, error) + + // GetConfig fetches the currently persisted config + // + // @tag Configuration + // Minimum server version: 5.2 + GetConfig() *model.Config + + // GetUnsanitizedConfig fetches the currently persisted config without removing secrets. 
+ // + // @tag Configuration + // Minimum server version: 5.16 + GetUnsanitizedConfig() *model.Config + + // SaveConfig sets the given config and persists the changes + // + // @tag Configuration + // Minimum server version: 5.2 + SaveConfig(config *model.Config) *model.AppError + + // GetPluginConfig fetches the currently persisted config of plugin + // + // @tag Plugin + // Minimum server version: 5.6 + GetPluginConfig() map[string]any + + // SavePluginConfig sets the given config for plugin and persists the changes + // + // @tag Plugin + // Minimum server version: 5.6 + SavePluginConfig(config map[string]any) *model.AppError + + // GetBundlePath returns the absolute path where the plugin's bundle was unpacked. + // + // @tag Plugin + // Minimum server version: 5.10 + GetBundlePath() (string, error) + + // GetLicense returns the current license used by the Mattermost server. Returns nil if + // the server does not have a license. + // + // @tag Server + // Minimum server version: 5.10 + GetLicense() *model.License + + // IsEnterpriseReady returns true if the Mattermost server is configured as Enterprise Ready. + // + // @tag Server + // Minimum server version: 5.10 + IsEnterpriseReady() bool + + // GetServerVersion return the current Mattermost server version + // + // @tag Server + // Minimum server version: 5.4 + GetServerVersion() string + + // GetSystemInstallDate returns the time that Mattermost was first installed and ran. + // + // @tag Server + // Minimum server version: 5.10 + GetSystemInstallDate() (int64, *model.AppError) + + // GetDiagnosticId returns a unique identifier used by the server for diagnostic reports. + // + // @tag Server + // Minimum server version: 5.10 + GetDiagnosticId() string + + // GetTelemetryId returns a unique identifier used by the server for telemetry reports. + // + // @tag Server + // Minimum server version: 5.28 + GetTelemetryId() string + + // CreateUser creates a user. 
+ // + // @tag User + // Minimum server version: 5.2 + CreateUser(user *model.User) (*model.User, *model.AppError) + + // DeleteUser deletes a user. + // + // @tag User + // Minimum server version: 5.2 + DeleteUser(userID string) *model.AppError + + // GetUsers a list of users based on search options. + // + // Not all fields in UserGetOptions are supported by this API. + // + // @tag User + // Minimum server version: 5.10 + GetUsers(options *model.UserGetOptions) ([]*model.User, *model.AppError) + + // GetUsersByIds gets a list of users by their IDs. + // + // @tag User + // Minimum server version: 9.8 + GetUsersByIds(userIDs []string) ([]*model.User, *model.AppError) + + // GetUser gets a user. + // + // @tag User + // Minimum server version: 5.2 + GetUser(userID string) (*model.User, *model.AppError) + + // GetUserByEmail gets a user by their email address. + // + // @tag User + // Minimum server version: 5.2 + GetUserByEmail(email string) (*model.User, *model.AppError) + + // GetUserByUsername gets a user by their username. + // + // @tag User + // Minimum server version: 5.2 + GetUserByUsername(name string) (*model.User, *model.AppError) + + // GetUsersByUsernames gets users by their usernames. + // + // @tag User + // Minimum server version: 5.6 + GetUsersByUsernames(usernames []string) ([]*model.User, *model.AppError) + + // GetUsersInTeam gets users in team. + // + // @tag User + // @tag Team + // Minimum server version: 5.6 + GetUsersInTeam(teamID string, page int, perPage int) ([]*model.User, *model.AppError) + + // GetPreferenceForUser gets a single preference for a user. An error is returned if the user has no preference + // set with the given category and name, an error is returned. + // + // @tag User + // @tag Preference + // Minimum server version: 9.5 + GetPreferenceForUser(userID, category, name string) (model.Preference, *model.AppError) + + // GetPreferencesForUser gets a user's preferences. 
+ // + // @tag User + // @tag Preference + // Minimum server version: 5.26 + GetPreferencesForUser(userID string) ([]model.Preference, *model.AppError) + + // UpdatePreferencesForUser updates a user's preferences. + // + // @tag User + // @tag Preference + // Minimum server version: 5.26 + UpdatePreferencesForUser(userID string, preferences []model.Preference) *model.AppError + + // DeletePreferencesForUser deletes a user's preferences. + // + // @tag User + // @tag Preference + // Minimum server version: 5.26 + DeletePreferencesForUser(userID string, preferences []model.Preference) *model.AppError + + // GetSession returns the session object for the Session ID + // + // + // Minimum server version: 5.2 + GetSession(sessionID string) (*model.Session, *model.AppError) + + // CreateSession creates a new user session. + // + // @tag User + // Minimum server version: 6.2 + CreateSession(session *model.Session) (*model.Session, *model.AppError) + + // ExtendSessionExpiry extends the duration of an existing session. + // + // @tag User + // Minimum server version: 6.2 + ExtendSessionExpiry(sessionID string, newExpiry int64) *model.AppError + + // RevokeSession revokes an existing user session. + // + // @tag User + // Minimum server version: 6.2 + RevokeSession(sessionID string) *model.AppError + + // CreateUserAccessToken creates a new access token. + // @tag User + // Minimum server version: 5.38 + CreateUserAccessToken(token *model.UserAccessToken) (*model.UserAccessToken, *model.AppError) + + // RevokeUserAccessToken revokes an existing access token. + // @tag User + // Minimum server version: 5.38 + RevokeUserAccessToken(tokenID string) *model.AppError + + // GetTeamIcon gets the team icon. + // + // @tag Team + // Minimum server version: 5.6 + GetTeamIcon(teamID string) ([]byte, *model.AppError) + + // SetTeamIcon sets the team icon. 
+ // + // @tag Team + // Minimum server version: 5.6 + SetTeamIcon(teamID string, data []byte) *model.AppError + + // RemoveTeamIcon removes the team icon. + // + // @tag Team + // Minimum server version: 5.6 + RemoveTeamIcon(teamID string) *model.AppError + + // UpdateUser updates a user. + // + // @tag User + // Minimum server version: 5.2 + UpdateUser(user *model.User) (*model.User, *model.AppError) + + // GetUserStatus will get a user's status. + // + // @tag User + // Minimum server version: 5.2 + GetUserStatus(userID string) (*model.Status, *model.AppError) + + // GetUserStatusesByIds will return a list of user statuses based on the provided slice of user IDs. + // + // @tag User + // Minimum server version: 5.2 + GetUserStatusesByIds(userIds []string) ([]*model.Status, *model.AppError) + + // UpdateUserStatus will set a user's status until the user, or another integration/plugin, sets it back to online. + // The status parameter can be: "online", "away", "dnd", or "offline". + // + // @tag User + // Minimum server version: 5.2 + UpdateUserStatus(userID, status string) (*model.Status, *model.AppError) + + // SetUserStatusTimedDND will set a user's status to dnd for given time until the user, + // or another integration/plugin, sets it back to online. + // @tag User + // Minimum server version: 5.35 + SetUserStatusTimedDND(userId string, endtime int64) (*model.Status, *model.AppError) + + // UpdateUserActive deactivates or reactivates an user. + // + // @tag User + // Minimum server version: 5.8 + UpdateUserActive(userID string, active bool) *model.AppError + + // UpdateUserCustomStatus will set a user's custom status until the user, or another integration/plugin, clear it or update the custom status. + // The custom status have two parameters: emoji icon and custom text. 
+ // + // @tag User + // Minimum server version: 6.2 + UpdateUserCustomStatus(userID string, customStatus *model.CustomStatus) *model.AppError + + // RemoveUserCustomStatus will remove a user's custom status. + // + // @tag User + // Minimum server version: 6.2 + RemoveUserCustomStatus(userID string) *model.AppError + + // GetUsersInChannel returns a page of users in a channel. Page counting starts at 0. + // The sortBy parameter can be: "username" or "status". + // + // @tag User + // @tag Channel + // Minimum server version: 5.6 + GetUsersInChannel(channelID, sortBy string, page, perPage int) ([]*model.User, *model.AppError) + + // GetLDAPUserAttributes will return LDAP attributes for a user. + // The attributes parameter should be a list of attributes to pull. + // Returns a map with attribute names as keys and the user's attributes as values. + // Requires an enterprise license, LDAP to be configured and for the user to use LDAP as an authentication method. + // + // @tag User + // Minimum server version: 5.3 + GetLDAPUserAttributes(userID string, attributes []string) (map[string]string, *model.AppError) + + // CreateTeam creates a team. + // + // @tag Team + // Minimum server version: 5.2 + CreateTeam(team *model.Team) (*model.Team, *model.AppError) + + // DeleteTeam deletes a team. + // + // @tag Team + // Minimum server version: 5.2 + DeleteTeam(teamID string) *model.AppError + + // GetTeam gets all teams. + // + // @tag Team + // Minimum server version: 5.2 + GetTeams() ([]*model.Team, *model.AppError) + + // GetTeam gets a team. + // + // @tag Team + // Minimum server version: 5.2 + GetTeam(teamID string) (*model.Team, *model.AppError) + + // GetTeamByName gets a team by its name. + // + // @tag Team + // Minimum server version: 5.2 + GetTeamByName(name string) (*model.Team, *model.AppError) + + // GetTeamsUnreadForUser gets the unread message and mention counts for each team to which the given user belongs. 
+ // + // @tag Team + // @tag User + // Minimum server version: 5.6 + GetTeamsUnreadForUser(userID string) ([]*model.TeamUnread, *model.AppError) + + // UpdateTeam updates a team. + // + // @tag Team + // Minimum server version: 5.2 + UpdateTeam(team *model.Team) (*model.Team, *model.AppError) + + // SearchTeams search a team. + // + // @tag Team + // Minimum server version: 5.8 + SearchTeams(term string) ([]*model.Team, *model.AppError) + + // GetTeamsForUser returns list of teams of given user ID. + // + // @tag Team + // @tag User + // Minimum server version: 5.6 + GetTeamsForUser(userID string) ([]*model.Team, *model.AppError) + + // CreateTeamMember creates a team membership. + // + // @tag Team + // @tag User + // Minimum server version: 5.2 + CreateTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) + + // CreateTeamMembers creates a team membership for all provided user ids. + // + // @tag Team + // @tag User + // Minimum server version: 5.2 + CreateTeamMembers(teamID string, userIds []string, requestorId string) ([]*model.TeamMember, *model.AppError) + + // CreateTeamMembersGracefully creates a team membership for all provided user ids and reports the users that were not added. + // + // @tag Team + // @tag User + // Minimum server version: 5.20 + CreateTeamMembersGracefully(teamID string, userIds []string, requestorId string) ([]*model.TeamMemberWithError, *model.AppError) + + // DeleteTeamMember deletes a team membership. + // + // @tag Team + // @tag User + // Minimum server version: 5.2 + DeleteTeamMember(teamID, userID, requestorId string) *model.AppError + + // GetTeamMembers returns the memberships of a specific team. + // + // @tag Team + // @tag User + // Minimum server version: 5.2 + GetTeamMembers(teamID string, page, perPage int) ([]*model.TeamMember, *model.AppError) + + // GetTeamMember returns a specific membership. 
+ // + // @tag Team + // @tag User + // Minimum server version: 5.2 + GetTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) + + // GetTeamMembersForUser returns all team memberships for a user. + // + // @tag Team + // @tag User + // Minimum server version: 5.10 + GetTeamMembersForUser(userID string, page int, perPage int) ([]*model.TeamMember, *model.AppError) + + // UpdateTeamMemberRoles updates the role for a team membership. + // + // @tag Team + // @tag User + // Minimum server version: 5.2 + UpdateTeamMemberRoles(teamID, userID, newRoles string) (*model.TeamMember, *model.AppError) + + // CreateChannel creates a channel. + // + // @tag Channel + // Minimum server version: 5.2 + CreateChannel(channel *model.Channel) (*model.Channel, *model.AppError) + + // DeleteChannel deletes a channel. + // + // @tag Channel + // Minimum server version: 5.2 + DeleteChannel(channelId string) *model.AppError + + // GetPublicChannelsForTeam gets a list of all channels. + // + // @tag Channel + // @tag Team + // Minimum server version: 5.2 + GetPublicChannelsForTeam(teamID string, page, perPage int) ([]*model.Channel, *model.AppError) + + // GetChannel gets a channel. + // + // @tag Channel + // Minimum server version: 5.2 + GetChannel(channelId string) (*model.Channel, *model.AppError) + + // GetChannelByName gets a channel by its name, given a team id. + // + // @tag Channel + // Minimum server version: 5.2 + GetChannelByName(teamID, name string, includeDeleted bool) (*model.Channel, *model.AppError) + + // GetChannelByNameForTeamName gets a channel by its name, given a team name. + // + // @tag Channel + // @tag Team + // Minimum server version: 5.2 + GetChannelByNameForTeamName(teamName, channelName string, includeDeleted bool) (*model.Channel, *model.AppError) + + // GetChannelsForTeamForUser gets a list of channels for given user ID in given team ID, including DMs. 
+ // If an empty string is passed as the team ID, the user's channels on all teams and their DMs will be returned. + // + // @tag Channel + // @tag Team + // @tag User + // Minimum server version: 5.6 + GetChannelsForTeamForUser(teamID, userID string, includeDeleted bool) ([]*model.Channel, *model.AppError) + + // GetChannelStats gets statistics for a channel. + // + // @tag Channel + // Minimum server version: 5.6 + GetChannelStats(channelId string) (*model.ChannelStats, *model.AppError) + + // GetDirectChannel gets a direct message channel. + // If the channel does not exist it will create it. + // + // @tag Channel + // @tag User + // Minimum server version: 5.2 + GetDirectChannel(userId1, userId2 string) (*model.Channel, *model.AppError) + + // GetGroupChannel gets a group message channel. + // If the channel does not exist it will create it. + // + // @tag Channel + // @tag User + // Minimum server version: 5.2 + GetGroupChannel(userIds []string) (*model.Channel, *model.AppError) + + // UpdateChannel updates a channel. + // + // @tag Channel + // Minimum server version: 5.2 + UpdateChannel(channel *model.Channel) (*model.Channel, *model.AppError) + + // SearchChannels returns the channels on a team matching the provided search term. + // + // @tag Channel + // Minimum server version: 5.6 + SearchChannels(teamID string, term string) ([]*model.Channel, *model.AppError) + + // CreateChannelSidebarCategory creates a new sidebar category for a set of channels. + // + // @tag ChannelSidebar + // Minimum server version: 5.38 + CreateChannelSidebarCategory(userID, teamID string, newCategory *model.SidebarCategoryWithChannels) (*model.SidebarCategoryWithChannels, *model.AppError) + + // GetChannelSidebarCategories returns sidebar categories. 
+ // + // @tag ChannelSidebar + // Minimum server version: 5.38 + GetChannelSidebarCategories(userID, teamID string) (*model.OrderedSidebarCategories, *model.AppError) + + // UpdateChannelSidebarCategories updates the channel sidebar categories. + // + // @tag ChannelSidebar + // Minimum server version: 5.38 + UpdateChannelSidebarCategories(userID, teamID string, categories []*model.SidebarCategoryWithChannels) ([]*model.SidebarCategoryWithChannels, *model.AppError) + + // SearchUsers returns a list of users based on some search criteria. + // + // @tag User + // Minimum server version: 5.6 + SearchUsers(search *model.UserSearch) ([]*model.User, *model.AppError) + + // SearchPostsInTeam returns a list of posts in a specific team that match the given params. + // + // @tag Post + // @tag Team + // Minimum server version: 5.10 + SearchPostsInTeam(teamID string, paramsList []*model.SearchParams) ([]*model.Post, *model.AppError) + + // SearchPostsInTeamForUser returns a list of posts by team and user that match the given + // search parameters. + // @tag Post + // Minimum server version: 5.26 + SearchPostsInTeamForUser(teamID string, userID string, searchParams model.SearchParameter) (*model.PostSearchResults, *model.AppError) + + // AddChannelMember joins a user to a channel (as if they joined themselves) + // This means the user will not receive notifications for joining the channel. + // + // @tag Channel + // @tag User + // Minimum server version: 5.2 + AddChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) + + // AddUserToChannel adds a user to a channel as if the specified user had invited them. + // This means the user will receive the regular notifications for being added to the channel. + // + // @tag User + // @tag Channel + // Minimum server version: 5.18 + AddUserToChannel(channelId, userID, asUserId string) (*model.ChannelMember, *model.AppError) + + // GetChannelMember gets a channel membership for a user. 
+ // + // @tag Channel + // @tag User + // Minimum server version: 5.2 + GetChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) + + // GetChannelMembers gets a channel membership for all users. + // + // @tag Channel + // @tag User + // Minimum server version: 5.6 + GetChannelMembers(channelId string, page, perPage int) (model.ChannelMembers, *model.AppError) + + // GetChannelMembersByIds gets a channel membership for a particular User + // + // @tag Channel + // @tag User + // Minimum server version: 5.6 + GetChannelMembersByIds(channelId string, userIds []string) (model.ChannelMembers, *model.AppError) + + // GetChannelMembersForUser returns all channel memberships on a team for a user. + // + // @tag Channel + // @tag User + // Minimum server version: 5.10 + GetChannelMembersForUser(teamID, userID string, page, perPage int) ([]*model.ChannelMember, *model.AppError) + + // UpdateChannelMemberRoles updates a user's roles for a channel. + // + // @tag Channel + // @tag User + // Minimum server version: 5.2 + UpdateChannelMemberRoles(channelId, userID, newRoles string) (*model.ChannelMember, *model.AppError) + + // UpdateChannelMemberNotifications updates a user's notification properties for a channel. + // + // @tag Channel + // @tag User + // Minimum server version: 5.2 + UpdateChannelMemberNotifications(channelId, userID string, notifications map[string]string) (*model.ChannelMember, *model.AppError) + + // PatchChannelMembersNotifications updates the notification properties for multiple channel members. + // Other changes made to the channel memberships will be ignored. A maximum of 200 members can be + // updated at once. + // + // @tag Channel + // @tag User + // Minimum server version: 9.5 + PatchChannelMembersNotifications(members []*model.ChannelMemberIdentifier, notifyProps map[string]string) *model.AppError + + // GetGroup gets a group by ID. 
+ // + // @tag Group + // Minimum server version: 5.18 + GetGroup(groupId string) (*model.Group, *model.AppError) + + // GetGroupByName gets a group by name. + // + // @tag Group + // Minimum server version: 5.18 + GetGroupByName(name string) (*model.Group, *model.AppError) + + // GetGroupMemberUsers gets a page of users belonging to the given group. + // + // @tag Group + // Minimum server version: 5.35 + GetGroupMemberUsers(groupID string, page, perPage int) ([]*model.User, *model.AppError) + + // GetGroupsBySource gets a list of all groups for the given source. + // + // @tag Group + // Minimum server version: 5.35 + GetGroupsBySource(groupSource model.GroupSource) ([]*model.Group, *model.AppError) + + // GetGroupsForUser gets the groups a user is in. + // + // @tag Group + // @tag User + // Minimum server version: 5.18 + GetGroupsForUser(userID string) ([]*model.Group, *model.AppError) + + // DeleteChannelMember deletes a channel membership for a user. + // + // @tag Channel + // @tag User + // Minimum server version: 5.2 + DeleteChannelMember(channelId, userID string) *model.AppError + + // CreatePost creates a post. + // + // @tag Post + // Minimum server version: 5.2 + CreatePost(post *model.Post) (*model.Post, *model.AppError) + + // AddReaction add a reaction to a post. + // + // @tag Post + // Minimum server version: 5.3 + AddReaction(reaction *model.Reaction) (*model.Reaction, *model.AppError) + + // RemoveReaction remove a reaction from a post. + // + // @tag Post + // Minimum server version: 5.3 + RemoveReaction(reaction *model.Reaction) *model.AppError + + // GetReaction get the reactions of a post. + // + // @tag Post + // Minimum server version: 5.3 + GetReactions(postId string) ([]*model.Reaction, *model.AppError) + + // SendEphemeralPost creates an ephemeral post. 
+ // + // @tag Post + // Minimum server version: 5.2 + SendEphemeralPost(userID string, post *model.Post) *model.Post + + // UpdateEphemeralPost updates an ephemeral message previously sent to the user. + // EXPERIMENTAL: This API is experimental and can be changed without advance notice. + // + // @tag Post + // Minimum server version: 5.2 + UpdateEphemeralPost(userID string, post *model.Post) *model.Post + + // DeleteEphemeralPost deletes an ephemeral message previously sent to the user. + // EXPERIMENTAL: This API is experimental and can be changed without advance notice. + // + // @tag Post + // Minimum server version: 5.2 + DeleteEphemeralPost(userID, postId string) + + // DeletePost deletes a post. + // + // @tag Post + // Minimum server version: 5.2 + DeletePost(postId string) *model.AppError + + // GetPostThread gets a post with all the other posts in the same thread. + // + // @tag Post + // Minimum server version: 5.6 + GetPostThread(postId string) (*model.PostList, *model.AppError) + + // GetPost gets a post. + // + // @tag Post + // Minimum server version: 5.2 + GetPost(postId string) (*model.Post, *model.AppError) + + // GetPostsSince gets posts created after a specified time as Unix time in milliseconds. + // + // @tag Post + // @tag Channel + // Minimum server version: 5.6 + GetPostsSince(channelId string, time int64) (*model.PostList, *model.AppError) + + // GetPostsAfter gets a page of posts that were posted after the post provided. + // + // @tag Post + // @tag Channel + // Minimum server version: 5.6 + GetPostsAfter(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) + + // GetPostsBefore gets a page of posts that were posted before the post provided. + // + // @tag Post + // @tag Channel + // Minimum server version: 5.6 + GetPostsBefore(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) + + // GetPostsForChannel gets a list of posts for a channel. 
+ // + // @tag Post + // @tag Channel + // Minimum server version: 5.6 + GetPostsForChannel(channelId string, page, perPage int) (*model.PostList, *model.AppError) + + // GetTeamStats gets a team's statistics + // + // @tag Team + // Minimum server version: 5.8 + GetTeamStats(teamID string) (*model.TeamStats, *model.AppError) + + // UpdatePost updates a post. + // + // @tag Post + // Minimum server version: 5.2 + UpdatePost(post *model.Post) (*model.Post, *model.AppError) + + // GetProfileImage gets user's profile image. + // + // @tag User + // Minimum server version: 5.6 + GetProfileImage(userID string) ([]byte, *model.AppError) + + // SetProfileImage sets a user's profile image. + // + // @tag User + // Minimum server version: 5.6 + SetProfileImage(userID string, data []byte) *model.AppError + + // GetEmojiList returns a page of custom emoji on the system. + // + // The sortBy parameter can be: "name". + // + // @tag Emoji + // Minimum server version: 5.6 + GetEmojiList(sortBy string, page, perPage int) ([]*model.Emoji, *model.AppError) + + // GetEmojiByName gets an emoji by it's name. + // + // @tag Emoji + // Minimum server version: 5.6 + GetEmojiByName(name string) (*model.Emoji, *model.AppError) + + // GetEmoji returns a custom emoji based on the emojiId string. + // + // @tag Emoji + // Minimum server version: 5.6 + GetEmoji(emojiId string) (*model.Emoji, *model.AppError) + + // CopyFileInfos duplicates the FileInfo objects referenced by the given file ids, + // recording the given user id as the new creator and returning the new set of file ids. + // + // The duplicate FileInfo objects are not initially linked to a post, but may now be passed + // to CreatePost. Use this API to duplicate a post and its file attachments without + // actually duplicating the uploaded files. 
+ // + // @tag File + // @tag User + // Minimum server version: 5.2 + CopyFileInfos(userID string, fileIds []string) ([]string, *model.AppError) + + // GetFileInfo gets a File Info for a specific fileId + // + // @tag File + // Minimum server version: 5.3 + GetFileInfo(fileId string) (*model.FileInfo, *model.AppError) + + // SetFileSearchableContent update the File Info searchable text for full text search + // + // @tag File + // Minimum server version: 9.1 + SetFileSearchableContent(fileID string, content string) *model.AppError + + // GetFileInfos gets File Infos with options + // + // @tag File + // Minimum server version: 5.22 + GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) + + // GetFile gets content of a file by it's ID + // + // @tag File + // Minimum server version: 5.8 + GetFile(fileId string) ([]byte, *model.AppError) + + // GetFileLink gets the public link to a file by fileId. + // + // @tag File + // Minimum server version: 5.6 + GetFileLink(fileId string) (string, *model.AppError) + + // ReadFile reads the file from the backend for a specific path + // + // @tag File + // Minimum server version: 5.3 + ReadFile(path string) ([]byte, *model.AppError) + + // GetEmojiImage returns the emoji image. + // + // @tag Emoji + // Minimum server version: 5.6 + GetEmojiImage(emojiId string) ([]byte, string, *model.AppError) + + // UploadFile will upload a file to a channel using a multipart request, to be later attached to a post. + // + // @tag File + // @tag Channel + // Minimum server version: 5.6 + UploadFile(data []byte, channelId string, filename string) (*model.FileInfo, *model.AppError) + + // OpenInteractiveDialog will open an interactive dialog on a user's client that + // generated the trigger ID. Used with interactive message buttons, menus + // and slash commands. 
+ // + // Minimum server version: 5.6 + OpenInteractiveDialog(dialog model.OpenDialogRequest) *model.AppError + + // Plugin Section + + // GetPlugins will return a list of plugin manifests for currently active plugins. + // + // @tag Plugin + // Minimum server version: 5.6 + GetPlugins() ([]*model.Manifest, *model.AppError) + + // EnablePlugin will enable an plugin installed. + // + // @tag Plugin + // Minimum server version: 5.6 + EnablePlugin(id string) *model.AppError + + // DisablePlugin will disable an enabled plugin. + // + // @tag Plugin + // Minimum server version: 5.6 + DisablePlugin(id string) *model.AppError + + // RemovePlugin will disable and delete a plugin. + // + // @tag Plugin + // Minimum server version: 5.6 + RemovePlugin(id string) *model.AppError + + // GetPluginStatus will return the status of a plugin. + // + // @tag Plugin + // Minimum server version: 5.6 + GetPluginStatus(id string) (*model.PluginStatus, *model.AppError) + + // InstallPlugin will upload another plugin with tar.gz file. + // Previous version will be replaced on replace true. + // + // @tag Plugin + // Minimum server version: 5.18 + InstallPlugin(file io.Reader, replace bool) (*model.Manifest, *model.AppError) + + // KV Store Section + + // KVSet stores a key-value pair, unique per plugin. + // Provided helper functions and internal plugin code will use the prefix `mmi_` before keys. Do not use this prefix. + // + // @tag KeyValueStore + // Minimum server version: 5.2 + KVSet(key string, value []byte) *model.AppError + + // KVCompareAndSet updates a key-value pair, unique per plugin, but only if the current value matches the given oldValue. + // Inserts a new key if oldValue == nil. 
+ // Returns (false, err) if DB error occurred + // Returns (false, nil) if current value != oldValue or key already exists when inserting + // Returns (true, nil) if current value == oldValue or new key is inserted + // + // @tag KeyValueStore + // Minimum server version: 5.12 + KVCompareAndSet(key string, oldValue, newValue []byte) (bool, *model.AppError) + + // KVCompareAndDelete deletes a key-value pair, unique per plugin, but only if the current value matches the given oldValue. + // Returns (false, err) if DB error occurred + // Returns (false, nil) if current value != oldValue or key does not exist when deleting + // Returns (true, nil) if current value == oldValue and the key was deleted + // + // @tag KeyValueStore + // Minimum server version: 5.16 + KVCompareAndDelete(key string, oldValue []byte) (bool, *model.AppError) + + // KVSetWithOptions stores a key-value pair, unique per plugin, according to the given options. + // Returns (false, err) if DB error occurred + // Returns (false, nil) if the value was not set + // Returns (true, nil) if the value was set + // + // Minimum server version: 5.20 + KVSetWithOptions(key string, value []byte, options model.PluginKVSetOptions) (bool, *model.AppError) + + // KVSet stores a key-value pair with an expiry time, unique per plugin. + // + // @tag KeyValueStore + // Minimum server version: 5.6 + KVSetWithExpiry(key string, value []byte, expireInSeconds int64) *model.AppError + + // KVGet retrieves a value based on the key, unique per plugin. Returns nil for non-existent keys. + // + // @tag KeyValueStore + // Minimum server version: 5.2 + KVGet(key string) ([]byte, *model.AppError) + + // KVDelete removes a key-value pair, unique per plugin. Returns nil for non-existent keys. + // + // @tag KeyValueStore + // Minimum server version: 5.2 + KVDelete(key string) *model.AppError + + // KVDeleteAll removes all key-value pairs for a plugin. 
+ // + // @tag KeyValueStore + // Minimum server version: 5.6 + KVDeleteAll() *model.AppError + + // KVList lists all keys for a plugin. + // + // @tag KeyValueStore + // Minimum server version: 5.6 + KVList(page, perPage int) ([]string, *model.AppError) + + // PublishWebSocketEvent sends an event to WebSocket connections. + // event is the type and will be prepended with "custom__". + // payload is the data sent with the event. Interface values must be primitive Go types or mattermost-server/model types. + // broadcast determines to which users to send the event. + // + // Minimum server version: 5.2 + PublishWebSocketEvent(event string, payload map[string]any, broadcast *model.WebsocketBroadcast) + + // HasPermissionTo check if the user has the permission at system scope. + // + // @tag User + // Minimum server version: 5.3 + HasPermissionTo(userID string, permission *model.Permission) bool + + // HasPermissionToTeam check if the user has the permission at team scope. + // + // @tag User + // @tag Team + // Minimum server version: 5.3 + HasPermissionToTeam(userID, teamID string, permission *model.Permission) bool + + // HasPermissionToChannel check if the user has the permission at channel scope. + // + // @tag User + // @tag Channel + // Minimum server version: 5.3 + HasPermissionToChannel(userID, channelId string, permission *model.Permission) bool + + // RolesGrantPermission check if the specified roles grant the specified permission + // + // Minimum server version: 6.3 + RolesGrantPermission(roleNames []string, permissionId string) bool + + // LogDebug writes a log message to the Mattermost server log file. + // Appropriate context such as the plugin name will already be added as fields so plugins + // do not need to add that info. + // + // @tag Logging + // Minimum server version: 5.2 + LogDebug(msg string, keyValuePairs ...any) + + // LogInfo writes a log message to the Mattermost server log file. 
+ // Appropriate context such as the plugin name will already be added as fields so plugins + // do not need to add that info. + // + // @tag Logging + // Minimum server version: 5.2 + LogInfo(msg string, keyValuePairs ...any) + + // LogError writes a log message to the Mattermost server log file. + // Appropriate context such as the plugin name will already be added as fields so plugins + // do not need to add that info. + // + // @tag Logging + // Minimum server version: 5.2 + LogError(msg string, keyValuePairs ...any) + + // LogWarn writes a log message to the Mattermost server log file. + // Appropriate context such as the plugin name will already be added as fields so plugins + // do not need to add that info. + // + // @tag Logging + // Minimum server version: 5.2 + LogWarn(msg string, keyValuePairs ...any) + + // SendMail sends an email to a specific address + // + // Minimum server version: 5.7 + SendMail(to, subject, htmlBody string) *model.AppError + + // CreateBot creates the given bot and corresponding user. + // + // @tag Bot + // Minimum server version: 5.10 + CreateBot(bot *model.Bot) (*model.Bot, *model.AppError) + + // PatchBot applies the given patch to the bot and corresponding user. + // + // @tag Bot + // Minimum server version: 5.10 + PatchBot(botUserId string, botPatch *model.BotPatch) (*model.Bot, *model.AppError) + + // GetBot returns the given bot. + // + // @tag Bot + // Minimum server version: 5.10 + GetBot(botUserId string, includeDeleted bool) (*model.Bot, *model.AppError) + + // GetBots returns the requested page of bots. + // + // @tag Bot + // Minimum server version: 5.10 + GetBots(options *model.BotGetOptions) ([]*model.Bot, *model.AppError) + + // UpdateBotActive marks a bot as active or inactive, along with its corresponding user. 
+ // + // @tag Bot + // Minimum server version: 5.10 + UpdateBotActive(botUserId string, active bool) (*model.Bot, *model.AppError) + + // PermanentDeleteBot permanently deletes a bot and its corresponding user. + // + // @tag Bot + // Minimum server version: 5.10 + PermanentDeleteBot(botUserId string) *model.AppError + + // PluginHTTP allows inter-plugin requests to plugin APIs. + // + // Minimum server version: 5.18 + PluginHTTP(request *http.Request) *http.Response + + // PublishUserTyping publishes a user is typing WebSocket event. + // The parentId parameter may be an empty string, the other parameters are required. + // + // @tag User + // Minimum server version: 5.26 + PublishUserTyping(userID, channelId, parentId string) *model.AppError + + // CreateCommand creates a server-owned slash command that is not handled by the plugin + // itself, and which will persist past the life of the plugin. The command will have its + // CreatorId set to "" and its PluginId set to the id of the plugin that created it. + // + // @tag SlashCommand + // Minimum server version: 5.28 + CreateCommand(cmd *model.Command) (*model.Command, error) + + // ListCommands returns the list of all slash commands for teamID. E.g., custom commands + // (those created through the integrations menu, the REST api, or the plugin api CreateCommand), + // plugin commands (those created with plugin api RegisterCommand), and builtin commands + // (those added internally through RegisterCommandProvider). + // + // @tag SlashCommand + // Minimum server version: 5.28 + ListCommands(teamID string) ([]*model.Command, error) + + // ListCustomCommands returns the list of slash commands for teamID that where created + // through the integrations menu, the REST api, or the plugin api CreateCommand. 
+ // + // @tag SlashCommand + // Minimum server version: 5.28 + ListCustomCommands(teamID string) ([]*model.Command, error) + + // ListPluginCommands returns the list of slash commands for teamID that were created + // with the plugin api RegisterCommand. + // + // @tag SlashCommand + // Minimum server version: 5.28 + ListPluginCommands(teamID string) ([]*model.Command, error) + + // ListBuiltInCommands returns the list of slash commands that are builtin commands + // (those added internally through RegisterCommandProvider). + // + // @tag SlashCommand + // Minimum server version: 5.28 + ListBuiltInCommands() ([]*model.Command, error) + + // GetCommand returns the command definition based on a command id string. + // + // @tag SlashCommand + // Minimum server version: 5.28 + GetCommand(commandID string) (*model.Command, error) + + // UpdateCommand updates a single command (commandID) with the information provided in the + // updatedCmd model.Command struct. The following fields in the command cannot be updated: + // Id, Token, CreateAt, DeleteAt, and PluginId. If updatedCmd.TeamId is blank, it + // will be set to commandID's TeamId. + // + // @tag SlashCommand + // Minimum server version: 5.28 + UpdateCommand(commandID string, updatedCmd *model.Command) (*model.Command, error) + + // DeleteCommand deletes a slash command (commandID). + // + // @tag SlashCommand + // Minimum server version: 5.28 + DeleteCommand(commandID string) error + + // CreateOAuthApp creates a new OAuth App. + // + // @tag OAuth + // Minimum server version: 5.38 + CreateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) + + // GetOAuthApp gets an existing OAuth App by id. + // + // @tag OAuth + // Minimum server version: 5.38 + GetOAuthApp(appID string) (*model.OAuthApp, *model.AppError) + + // UpdateOAuthApp updates an existing OAuth App. 
+ // + // @tag OAuth + // Minimum server version: 5.38 + UpdateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) + + // DeleteOAuthApp deletes an existing OAuth App by id. + // + // @tag OAuth + // Minimum server version: 5.38 + DeleteOAuthApp(appID string) *model.AppError + + // PublishPluginClusterEvent broadcasts a plugin event to all other running instances of + // the calling plugin that are present in the cluster. + // + // This method is used to allow plugin communication in a High-Availability cluster. + // The receiving side should implement the OnPluginClusterEvent hook + // to receive events sent through this method. + // + // Minimum server version: 5.36 + PublishPluginClusterEvent(ev model.PluginClusterEvent, opts model.PluginClusterEventSendOptions) error + + // RequestTrialLicense requests a trial license and installs it in the server + // + // Minimum server version: 5.36 + RequestTrialLicense(requesterID string, users int, termsAccepted bool, receiveEmailsAccepted bool) *model.AppError + + // GetCloudLimits gets limits associated with a cloud workspace, if any + // + // Minimum server version: 7.0 + GetCloudLimits() (*model.ProductLimits, error) + + // EnsureBotUser updates the bot if it exists, otherwise creates it. + // + // Minimum server version: 7.1 + EnsureBotUser(bot *model.Bot) (string, error) + + // RegisterCollectionAndTopic is no longer supported. + // + // Minimum server version: 7.6 + RegisterCollectionAndTopic(collectionType, topicType string) error + + // CreateUploadSession creates and returns a new (resumable) upload session. + // + // @tag Upload + // Minimum server version: 7.6 + CreateUploadSession(us *model.UploadSession) (*model.UploadSession, error) + + // UploadData uploads the data for a given upload session. + // + // @tag Upload + // Minimum server version: 7.6 + UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, error) + + // GetUploadSession returns the upload session for the provided id. 
+ // + // @tag Upload + // Minimum server version: 7.6 + GetUploadSession(uploadID string) (*model.UploadSession, error) + + // SendPushNotification will send a push notification to all of user's sessions. + // + // It is the responsibility of the plugin to respect the server's configuration and licence, + // especially related to `cfg.EmailSettings.PushNotificationContents`, particularly + // `model.IdLoadedNotification` and the generic settings. + // Refer to `app.sendPushNotificationSync` for the logic used to construct push notifications. + // + // Note: the NotificationWillBePushed hook will be run after SendPushNotification is called. + // + // Minimum server version: 9.0 + SendPushNotification(notification *model.PushNotification, userID string) *model.AppError + + // UpdateUserAuth updates a user's auth data. + // + // It is not currently possible to use this to set a user's auth to e-mail with a hashed + // password. It is meant to be used exclusively in setting a non-email auth service. + // + // @tag User + // Minimum server version: 9.3 + UpdateUserAuth(userID string, userAuth *model.UserAuth) (*model.UserAuth, *model.AppError) + + // RegisterPluginForSharedChannels registers the plugin as a `Remote` for SharedChannels. + // The plugin will receive synchronization messages via the `OnSharedChannelsSyncMsg` hook. + // This API is idempotent - when called repeatedly with the same `RegisterPluginOpts.PluginID` + // it will return the same remoteID. + // + // @tag SharedChannels + // Minimum server version: 9.5 + RegisterPluginForSharedChannels(opts model.RegisterPluginOpts) (remoteID string, err error) + + // UnregisterPluginForSharedChannels unregisters the plugin as a `Remote` for SharedChannels. + // The plugin will no longer receive synchronization messages via the `OnSharedChannelsSyncMsg` hook. 
+ // + // @tag SharedChannels + // Minimum server version: 9.5 + UnregisterPluginForSharedChannels(pluginID string) error + + // ShareChannel marks a channel for sharing via shared channels. Note, this does not automatically + // invite any remote clusters to the channel - use `InviteRemote` to invite a remote , or this plugin, + // to the shared channel and start synchronization. + // + // @tag SharedChannels + // Minimum server version: 9.5 + ShareChannel(sc *model.SharedChannel) (*model.SharedChannel, error) + + // UpdateSharedChannel updates a shared channel. This can be used to change the share name, + // display name, purpose, header, etc. + // + // @tag SharedChannels + // Minimum server version: 9.5 + UpdateSharedChannel(sc *model.SharedChannel) (*model.SharedChannel, error) + + // UnshareChannel unmarks a channel for sharing. The channel will no longer be shared and + // all remotes will be uninvited to the channel. + // + // @tag SharedChannels + // Minimum server version: 9.5 + UnshareChannel(channelID string) (unshared bool, err error) + + // UpdateSharedChannelCursor updates the cursor for the specified channel and RemoteID (passed by + // the plugin when registering). This can be used to manually set the point of last sync, either + // forward to skip older posts, or backward to re-sync history. This call by itself does not force + // a re-sync - a change to channel contents or a call to SyncSharedChannel are needed to force a sync. + // + // @tag SharedChannels + // Minimum server version: 9.5 + UpdateSharedChannelCursor(channelID, remoteID string, cusror model.GetPostsSinceForSyncCursor) error + + // SyncSharedChannel forces a shared channel to send any changed content to all remotes. + // + // @tag SharedChannels + // Minimum server version: 9.5 + SyncSharedChannel(channelID string) error + + // InviteRemoteToChannel invites a remote, or this plugin, as a target for synchronizing. 
Once invited, the + // remote will start to receive synchronization messages for any changed content in the specified channel. + // If `shareIfNotShared` is true, the channel's shared flag will be set, if not already. + // + // @tag SharedChannels + // Minimum server version: 9.5 + InviteRemoteToChannel(channelID string, remoteID string, userID string, shareIfNotShared bool) error + + // UninviteRemoteFromChannel uninvites a remote, or this plugin, such that it will stop receiving sychronization + // messages for the channel. + // + // @tag SharedChannels + // Minimum server version: 9.5 + UninviteRemoteFromChannel(channelID string, remoteID string) error + + // UpsertGroupMember adds a user to a group or updates their existing membership. + // + // @tag Group + // @tag User + // Minimum server version: 10.7 + UpsertGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) + + // UpsertGroupMembers adds multiple users to a group or updates their existing memberships. + // + // @tag Group + // @tag User + // Minimum server version: 10.7 + UpsertGroupMembers(groupID string, userIDs []string) ([]*model.GroupMember, *model.AppError) + + // GetGroupByRemoteID gets a group by its remote ID. + // + // @tag Group + // Minimum server version: 10.7 + GetGroupByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, *model.AppError) + + // CreateGroup creates a new group. + // + // @tag Group + // Minimum server version: 10.7 + CreateGroup(group *model.Group) (*model.Group, *model.AppError) + + // UpdateGroup updates a group. + // + // @tag Group + // Minimum server version: 10.7 + UpdateGroup(group *model.Group) (*model.Group, *model.AppError) + + // DeleteGroup soft deletes a group. + // + // @tag Group + // Minimum server version: 10.7 + DeleteGroup(groupID string) (*model.Group, *model.AppError) + + // RestoreGroup restores a soft deleted group. 
+ // + // @tag Group + // Minimum server version: 10.7 + RestoreGroup(groupID string) (*model.Group, *model.AppError) + + // DeleteGroupMember removes a user from a group. + // + // @tag Group + // @tag User + // Minimum server version: 10.7 + DeleteGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) + + // GetGroupSyncable gets a group syncable. + // + // @tag Group + // Minimum server version: 10.7 + GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) + + // GetGroupSyncables gets all group syncables for the given group. + // + // @tag Group + // Minimum server version: 10.7 + GetGroupSyncables(groupID string, syncableType model.GroupSyncableType) ([]*model.GroupSyncable, *model.AppError) + + // UpsertGroupSyncable creates or updates a group syncable. + // + // @tag Group + // Minimum server version: 10.7 + UpsertGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) + + // UpdateGroupSyncable updates a group syncable. + // + // @tag Group + // Minimum server version: 10.7 + UpdateGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) + + // DeleteGroupSyncable deletes a group syncable. + // + // @tag Group + // Minimum server version: 10.7 + DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) + + // UpdateUserRoles updates the role for a user. + // + // @tag Team + // @tag User + // Minimum server version: 9.8 + UpdateUserRoles(userID, newRoles string) (*model.User, *model.AppError) + + // GetPluginID returns the plugin ID. + // + // @tag Plugin + // Minimum server version: 10.1 + GetPluginID() string + + // GetGroups returns a list of all groups with the given options and restrictions. 
+ // + // @tag Group + // Minimum server version: 10.7 + GetGroups(page, perPage int, opts model.GroupSearchOpts, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, *model.AppError) + + // CreateDefaultSyncableMemberships creates default syncable memberships based off the provided parameters. + // + // @tag Group + // Minimum server version: 10.9 + CreateDefaultSyncableMemberships(params model.CreateDefaultMembershipParams) *model.AppError + + // DeleteGroupConstrainedMemberships deletes team and channel memberships of users who aren't members of the allowed groups of all group-constrained teams and channels. + // + // @tag Group + // Minimum server version: 10.9 + DeleteGroupConstrainedMemberships() *model.AppError + + // CreatePropertyField creates a new property field. + // + // @tag PropertyField + // Minimum server version: 10.10 + CreatePropertyField(field *model.PropertyField) (*model.PropertyField, error) + + // GetPropertyField gets a property field by groupID and fieldID. + // + // @tag PropertyField + // Minimum server version: 10.10 + GetPropertyField(groupID, fieldID string) (*model.PropertyField, error) + + // GetPropertyFields gets multiple property fields by groupID and a list of IDs. + // + // @tag PropertyField + // Minimum server version: 10.10 + GetPropertyFields(groupID string, ids []string) ([]*model.PropertyField, error) + + // UpdatePropertyField updates an existing property field. + // + // @tag PropertyField + // Minimum server version: 10.10 + UpdatePropertyField(groupID string, field *model.PropertyField) (*model.PropertyField, error) + + // DeletePropertyField deletes a property field (soft delete). + // + // @tag PropertyField + // Minimum server version: 10.10 + DeletePropertyField(groupID, fieldID string) error + + // SearchPropertyFields searches for property fields with filtering options. 
+ // + // @tag PropertyField + // Minimum server version: 11.0 + SearchPropertyFields(groupID string, opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) + + // CountPropertyFields counts property fields for a group. + // + // @tag PropertyField + // Minimum server version: 11.0 + CountPropertyFields(groupID string, includeDeleted bool) (int64, error) + + // CountPropertyFieldsForTarget counts property fields for a specific target. + // + // @tag PropertyField + // Minimum server version: 11.0 + CountPropertyFieldsForTarget(groupID, targetType, targetID string, includeDeleted bool) (int64, error) + + // CreatePropertyValue creates a new property value. + // + // @tag PropertyValue + // Minimum server version: 10.10 + CreatePropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) + + // GetPropertyValue gets a property value by groupID and valueID. + // + // @tag PropertyValue + // Minimum server version: 10.10 + GetPropertyValue(groupID, valueID string) (*model.PropertyValue, error) + + // GetPropertyValues gets multiple property values by groupID and a list of IDs. + // + // @tag PropertyValue + // Minimum server version: 10.10 + GetPropertyValues(groupID string, ids []string) ([]*model.PropertyValue, error) + + // UpdatePropertyValue updates an existing property value. + // + // @tag PropertyValue + // Minimum server version: 10.10 + UpdatePropertyValue(groupID string, value *model.PropertyValue) (*model.PropertyValue, error) + + // UpsertPropertyValue creates a new property value or updates if it already exists. + // + // @tag PropertyValue + // Minimum server version: 10.10 + UpsertPropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) + + // DeletePropertyValue deletes a property value (soft delete). + // + // @tag PropertyValue + // Minimum server version: 10.10 + DeletePropertyValue(groupID, valueID string) error + + // SearchPropertyValues searches for property values with filtering options. 
+ // + // @tag PropertyValue + // Minimum server version: 11.0 + SearchPropertyValues(groupID string, opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error) + + // RegisterPropertyGroup registers a new property group. + // + // @tag PropertyGroup + // Minimum server version: 10.10 + RegisterPropertyGroup(name string) (*model.PropertyGroup, error) + + // GetPropertyGroup gets a property group by name. + // + // @tag PropertyGroup + // Minimum server version: 10.10 + GetPropertyGroup(name string) (*model.PropertyGroup, error) + + // GetPropertyFieldByName gets a property field by groupID, targetID and name. + // + // @tag PropertyField + // Minimum server version: 10.10 + GetPropertyFieldByName(groupID, targetID, name string) (*model.PropertyField, error) + + // UpdatePropertyFields updates multiple property fields in a single operation. + // + // @tag PropertyField + // Minimum server version: 10.10 + UpdatePropertyFields(groupID string, fields []*model.PropertyField) ([]*model.PropertyField, error) + + // UpdatePropertyValues updates multiple property values in a single operation. + // + // @tag PropertyValue + // Minimum server version: 10.10 + UpdatePropertyValues(groupID string, values []*model.PropertyValue) ([]*model.PropertyValue, error) + + // UpsertPropertyValues creates or updates multiple property values in a single operation. + // + // @tag PropertyValue + // Minimum server version: 10.10 + UpsertPropertyValues(values []*model.PropertyValue) ([]*model.PropertyValue, error) + + // DeletePropertyValuesForTarget deletes all property values for a specific target. + // + // @tag PropertyValue + // Minimum server version: 10.10 + DeletePropertyValuesForTarget(groupID, targetType, targetID string) error + + // DeletePropertyValuesForField deletes all property values for a specific field. 
+ // + // @tag PropertyValue + // Minimum server version: 10.10 + DeletePropertyValuesForField(groupID, fieldID string) error + + // LogAuditRec logs an audit record using the default audit logger. + // + // @tag Audit + // Minimum server version: 10.10 + LogAuditRec(rec *model.AuditRecord) + + // LogAuditRecWithLevel logs an audit record with a specific log level. + // + // @tag Audit + // Minimum server version: 10.10 + LogAuditRecWithLevel(rec *model.AuditRecord, level mlog.Level) +} + +var handshake = plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "MATTERMOST_PLUGIN", + MagicCookieValue: "Securely message teams, anywhere.", +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/api_timer_layer_generated.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/api_timer_layer_generated.go new file mode 100644 index 00000000..d490d6d5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/api_timer_layer_generated.go @@ -0,0 +1,1673 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +// Code generated by "make pluginapi" +// DO NOT EDIT + +package plugin + +import ( + "io" + "net/http" + timePkg "time" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +type apiTimerLayer struct { + pluginID string + apiImpl API + metrics metricsInterface +} + +func (api *apiTimerLayer) recordTime(startTime timePkg.Time, name string, success bool) { + if api.metrics != nil { + elapsedTime := float64(timePkg.Since(startTime)) / float64(timePkg.Second) + api.metrics.ObservePluginAPIDuration(api.pluginID, name, success, elapsedTime) + } +} + +func (api *apiTimerLayer) LoadPluginConfiguration(dest any) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.LoadPluginConfiguration(dest) + api.recordTime(startTime, "LoadPluginConfiguration", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) RegisterCommand(command *model.Command) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RegisterCommand(command) + api.recordTime(startTime, "RegisterCommand", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) UnregisterCommand(teamID, trigger string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.UnregisterCommand(teamID, trigger) + api.recordTime(startTime, "UnregisterCommand", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) ExecuteSlashCommand(commandArgs *model.CommandArgs) (*model.CommandResponse, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.ExecuteSlashCommand(commandArgs) + api.recordTime(startTime, "ExecuteSlashCommand", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetConfig() *model.Config { + startTime := timePkg.Now() + _returnsA := api.apiImpl.GetConfig() + api.recordTime(startTime, "GetConfig", true) + return _returnsA +} + +func (api *apiTimerLayer) GetUnsanitizedConfig() *model.Config { + startTime := timePkg.Now() + 
_returnsA := api.apiImpl.GetUnsanitizedConfig() + api.recordTime(startTime, "GetUnsanitizedConfig", true) + return _returnsA +} + +func (api *apiTimerLayer) SaveConfig(config *model.Config) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SaveConfig(config) + api.recordTime(startTime, "SaveConfig", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetPluginConfig() map[string]any { + startTime := timePkg.Now() + _returnsA := api.apiImpl.GetPluginConfig() + api.recordTime(startTime, "GetPluginConfig", true) + return _returnsA +} + +func (api *apiTimerLayer) SavePluginConfig(config map[string]any) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SavePluginConfig(config) + api.recordTime(startTime, "SavePluginConfig", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetBundlePath() (string, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetBundlePath() + api.recordTime(startTime, "GetBundlePath", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetLicense() *model.License { + startTime := timePkg.Now() + _returnsA := api.apiImpl.GetLicense() + api.recordTime(startTime, "GetLicense", true) + return _returnsA +} + +func (api *apiTimerLayer) IsEnterpriseReady() bool { + startTime := timePkg.Now() + _returnsA := api.apiImpl.IsEnterpriseReady() + api.recordTime(startTime, "IsEnterpriseReady", true) + return _returnsA +} + +func (api *apiTimerLayer) GetServerVersion() string { + startTime := timePkg.Now() + _returnsA := api.apiImpl.GetServerVersion() + api.recordTime(startTime, "GetServerVersion", true) + return _returnsA +} + +func (api *apiTimerLayer) GetSystemInstallDate() (int64, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetSystemInstallDate() + api.recordTime(startTime, "GetSystemInstallDate", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) 
GetDiagnosticId() string { + startTime := timePkg.Now() + _returnsA := api.apiImpl.GetDiagnosticId() + api.recordTime(startTime, "GetDiagnosticId", true) + return _returnsA +} + +func (api *apiTimerLayer) GetTelemetryId() string { + startTime := timePkg.Now() + _returnsA := api.apiImpl.GetTelemetryId() + api.recordTime(startTime, "GetTelemetryId", true) + return _returnsA +} + +func (api *apiTimerLayer) CreateUser(user *model.User) (*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateUser(user) + api.recordTime(startTime, "CreateUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteUser(userID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeleteUser(userID) + api.recordTime(startTime, "DeleteUser", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetUsers(options *model.UserGetOptions) ([]*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUsers(options) + api.recordTime(startTime, "GetUsers", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUsersByIds(userIDs []string) ([]*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUsersByIds(userIDs) + api.recordTime(startTime, "GetUsersByIds", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUser(userID string) (*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUser(userID) + api.recordTime(startTime, "GetUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUserByEmail(email string) (*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUserByEmail(email) + api.recordTime(startTime, "GetUserByEmail", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api 
*apiTimerLayer) GetUserByUsername(name string) (*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUserByUsername(name) + api.recordTime(startTime, "GetUserByUsername", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUsersByUsernames(usernames []string) ([]*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUsersByUsernames(usernames) + api.recordTime(startTime, "GetUsersByUsernames", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUsersInTeam(teamID string, page int, perPage int) ([]*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUsersInTeam(teamID, page, perPage) + api.recordTime(startTime, "GetUsersInTeam", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPreferenceForUser(userID, category, name string) (model.Preference, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPreferenceForUser(userID, category, name) + api.recordTime(startTime, "GetPreferenceForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPreferencesForUser(userID string) ([]model.Preference, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPreferencesForUser(userID) + api.recordTime(startTime, "GetPreferencesForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdatePreferencesForUser(userID string, preferences []model.Preference) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.UpdatePreferencesForUser(userID, preferences) + api.recordTime(startTime, "UpdatePreferencesForUser", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) DeletePreferencesForUser(userID string, preferences []model.Preference) *model.AppError { + startTime := timePkg.Now() 
+ _returnsA := api.apiImpl.DeletePreferencesForUser(userID, preferences) + api.recordTime(startTime, "DeletePreferencesForUser", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetSession(sessionID string) (*model.Session, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetSession(sessionID) + api.recordTime(startTime, "GetSession", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateSession(session *model.Session) (*model.Session, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateSession(session) + api.recordTime(startTime, "CreateSession", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) ExtendSessionExpiry(sessionID string, newExpiry int64) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.ExtendSessionExpiry(sessionID, newExpiry) + api.recordTime(startTime, "ExtendSessionExpiry", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) RevokeSession(sessionID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RevokeSession(sessionID) + api.recordTime(startTime, "RevokeSession", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) CreateUserAccessToken(token *model.UserAccessToken) (*model.UserAccessToken, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateUserAccessToken(token) + api.recordTime(startTime, "CreateUserAccessToken", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) RevokeUserAccessToken(tokenID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RevokeUserAccessToken(tokenID) + api.recordTime(startTime, "RevokeUserAccessToken", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetTeamIcon(teamID string) ([]byte, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := 
api.apiImpl.GetTeamIcon(teamID) + api.recordTime(startTime, "GetTeamIcon", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SetTeamIcon(teamID string, data []byte) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SetTeamIcon(teamID, data) + api.recordTime(startTime, "SetTeamIcon", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) RemoveTeamIcon(teamID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RemoveTeamIcon(teamID) + api.recordTime(startTime, "RemoveTeamIcon", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) UpdateUser(user *model.User) (*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateUser(user) + api.recordTime(startTime, "UpdateUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUserStatus(userID string) (*model.Status, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUserStatus(userID) + api.recordTime(startTime, "GetUserStatus", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUserStatusesByIds(userIds []string) ([]*model.Status, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUserStatusesByIds(userIds) + api.recordTime(startTime, "GetUserStatusesByIds", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateUserStatus(userID, status string) (*model.Status, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateUserStatus(userID, status) + api.recordTime(startTime, "UpdateUserStatus", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SetUserStatusTimedDND(userId string, endtime int64) (*model.Status, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SetUserStatusTimedDND(userId, endtime) + 
api.recordTime(startTime, "SetUserStatusTimedDND", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateUserActive(userID string, active bool) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.UpdateUserActive(userID, active) + api.recordTime(startTime, "UpdateUserActive", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) UpdateUserCustomStatus(userID string, customStatus *model.CustomStatus) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.UpdateUserCustomStatus(userID, customStatus) + api.recordTime(startTime, "UpdateUserCustomStatus", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) RemoveUserCustomStatus(userID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RemoveUserCustomStatus(userID) + api.recordTime(startTime, "RemoveUserCustomStatus", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetUsersInChannel(channelID, sortBy string, page, perPage int) ([]*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUsersInChannel(channelID, sortBy, page, perPage) + api.recordTime(startTime, "GetUsersInChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetLDAPUserAttributes(userID string, attributes []string) (map[string]string, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetLDAPUserAttributes(userID, attributes) + api.recordTime(startTime, "GetLDAPUserAttributes", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateTeam(team *model.Team) (*model.Team, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateTeam(team) + api.recordTime(startTime, "CreateTeam", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteTeam(teamID string) *model.AppError { + startTime := 
timePkg.Now() + _returnsA := api.apiImpl.DeleteTeam(teamID) + api.recordTime(startTime, "DeleteTeam", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetTeams() ([]*model.Team, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeams() + api.recordTime(startTime, "GetTeams", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetTeam(teamID string) (*model.Team, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeam(teamID) + api.recordTime(startTime, "GetTeam", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetTeamByName(name string) (*model.Team, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeamByName(name) + api.recordTime(startTime, "GetTeamByName", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetTeamsUnreadForUser(userID string) ([]*model.TeamUnread, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeamsUnreadForUser(userID) + api.recordTime(startTime, "GetTeamsUnreadForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateTeam(team *model.Team) (*model.Team, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateTeam(team) + api.recordTime(startTime, "UpdateTeam", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SearchTeams(term string) ([]*model.Team, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SearchTeams(term) + api.recordTime(startTime, "SearchTeams", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetTeamsForUser(userID string) ([]*model.Team, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeamsForUser(userID) + api.recordTime(startTime, "GetTeamsForUser", 
_returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateTeamMember(teamID, userID) + api.recordTime(startTime, "CreateTeamMember", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateTeamMembers(teamID string, userIds []string, requestorId string) ([]*model.TeamMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateTeamMembers(teamID, userIds, requestorId) + api.recordTime(startTime, "CreateTeamMembers", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateTeamMembersGracefully(teamID string, userIds []string, requestorId string) ([]*model.TeamMemberWithError, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateTeamMembersGracefully(teamID, userIds, requestorId) + api.recordTime(startTime, "CreateTeamMembersGracefully", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteTeamMember(teamID, userID, requestorId string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeleteTeamMember(teamID, userID, requestorId) + api.recordTime(startTime, "DeleteTeamMember", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetTeamMembers(teamID string, page, perPage int) ([]*model.TeamMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeamMembers(teamID, page, perPage) + api.recordTime(startTime, "GetTeamMembers", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeamMember(teamID, userID) + api.recordTime(startTime, "GetTeamMember", _returnsB == nil) + return 
_returnsA, _returnsB +} + +func (api *apiTimerLayer) GetTeamMembersForUser(userID string, page int, perPage int) ([]*model.TeamMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeamMembersForUser(userID, page, perPage) + api.recordTime(startTime, "GetTeamMembersForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateTeamMemberRoles(teamID, userID, newRoles string) (*model.TeamMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateTeamMemberRoles(teamID, userID, newRoles) + api.recordTime(startTime, "UpdateTeamMemberRoles", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateChannel(channel *model.Channel) (*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateChannel(channel) + api.recordTime(startTime, "CreateChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteChannel(channelId string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeleteChannel(channelId) + api.recordTime(startTime, "DeleteChannel", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetPublicChannelsForTeam(teamID string, page, perPage int) ([]*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPublicChannelsForTeam(teamID, page, perPage) + api.recordTime(startTime, "GetPublicChannelsForTeam", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannel(channelId string) (*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannel(channelId) + api.recordTime(startTime, "GetChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelByName(teamID, name string, includeDeleted bool) (*model.Channel, *model.AppError) { + 
startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelByName(teamID, name, includeDeleted) + api.recordTime(startTime, "GetChannelByName", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelByNameForTeamName(teamName, channelName string, includeDeleted bool) (*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelByNameForTeamName(teamName, channelName, includeDeleted) + api.recordTime(startTime, "GetChannelByNameForTeamName", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelsForTeamForUser(teamID, userID string, includeDeleted bool) ([]*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelsForTeamForUser(teamID, userID, includeDeleted) + api.recordTime(startTime, "GetChannelsForTeamForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelStats(channelId string) (*model.ChannelStats, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelStats(channelId) + api.recordTime(startTime, "GetChannelStats", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetDirectChannel(userId1, userId2 string) (*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetDirectChannel(userId1, userId2) + api.recordTime(startTime, "GetDirectChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupChannel(userIds []string) (*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupChannel(userIds) + api.recordTime(startTime, "GetGroupChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateChannel(channel *model.Channel) (*model.Channel, *model.AppError) { + startTime := timePkg.Now() + 
_returnsA, _returnsB := api.apiImpl.UpdateChannel(channel) + api.recordTime(startTime, "UpdateChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SearchChannels(teamID string, term string) ([]*model.Channel, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SearchChannels(teamID, term) + api.recordTime(startTime, "SearchChannels", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateChannelSidebarCategory(userID, teamID string, newCategory *model.SidebarCategoryWithChannels) (*model.SidebarCategoryWithChannels, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateChannelSidebarCategory(userID, teamID, newCategory) + api.recordTime(startTime, "CreateChannelSidebarCategory", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelSidebarCategories(userID, teamID string) (*model.OrderedSidebarCategories, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelSidebarCategories(userID, teamID) + api.recordTime(startTime, "GetChannelSidebarCategories", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateChannelSidebarCategories(userID, teamID string, categories []*model.SidebarCategoryWithChannels) ([]*model.SidebarCategoryWithChannels, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateChannelSidebarCategories(userID, teamID, categories) + api.recordTime(startTime, "UpdateChannelSidebarCategories", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SearchUsers(search *model.UserSearch) ([]*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SearchUsers(search) + api.recordTime(startTime, "SearchUsers", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SearchPostsInTeam(teamID 
string, paramsList []*model.SearchParams) ([]*model.Post, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SearchPostsInTeam(teamID, paramsList) + api.recordTime(startTime, "SearchPostsInTeam", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SearchPostsInTeamForUser(teamID string, userID string, searchParams model.SearchParameter) (*model.PostSearchResults, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SearchPostsInTeamForUser(teamID, userID, searchParams) + api.recordTime(startTime, "SearchPostsInTeamForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) AddChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.AddChannelMember(channelId, userID) + api.recordTime(startTime, "AddChannelMember", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) AddUserToChannel(channelId, userID, asUserId string) (*model.ChannelMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.AddUserToChannel(channelId, userID, asUserId) + api.recordTime(startTime, "AddUserToChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelMember(channelId, userID) + api.recordTime(startTime, "GetChannelMember", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelMembers(channelId string, page, perPage int) (model.ChannelMembers, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelMembers(channelId, page, perPage) + api.recordTime(startTime, "GetChannelMembers", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api 
*apiTimerLayer) GetChannelMembersByIds(channelId string, userIds []string) (model.ChannelMembers, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelMembersByIds(channelId, userIds) + api.recordTime(startTime, "GetChannelMembersByIds", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetChannelMembersForUser(teamID, userID string, page, perPage int) ([]*model.ChannelMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetChannelMembersForUser(teamID, userID, page, perPage) + api.recordTime(startTime, "GetChannelMembersForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateChannelMemberRoles(channelId, userID, newRoles string) (*model.ChannelMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateChannelMemberRoles(channelId, userID, newRoles) + api.recordTime(startTime, "UpdateChannelMemberRoles", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateChannelMemberNotifications(channelId, userID string, notifications map[string]string) (*model.ChannelMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateChannelMemberNotifications(channelId, userID, notifications) + api.recordTime(startTime, "UpdateChannelMemberNotifications", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) PatchChannelMembersNotifications(members []*model.ChannelMemberIdentifier, notifyProps map[string]string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.PatchChannelMembersNotifications(members, notifyProps) + api.recordTime(startTime, "PatchChannelMembersNotifications", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetGroup(groupId string) (*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := 
api.apiImpl.GetGroup(groupId) + api.recordTime(startTime, "GetGroup", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupByName(name string) (*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupByName(name) + api.recordTime(startTime, "GetGroupByName", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupMemberUsers(groupID string, page, perPage int) ([]*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupMemberUsers(groupID, page, perPage) + api.recordTime(startTime, "GetGroupMemberUsers", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupsBySource(groupSource model.GroupSource) ([]*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupsBySource(groupSource) + api.recordTime(startTime, "GetGroupsBySource", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupsForUser(userID string) ([]*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupsForUser(userID) + api.recordTime(startTime, "GetGroupsForUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteChannelMember(channelId, userID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeleteChannelMember(channelId, userID) + api.recordTime(startTime, "DeleteChannelMember", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) CreatePost(post *model.Post) (*model.Post, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreatePost(post) + api.recordTime(startTime, "CreatePost", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) AddReaction(reaction *model.Reaction) (*model.Reaction, *model.AppError) { + startTime := 
timePkg.Now() + _returnsA, _returnsB := api.apiImpl.AddReaction(reaction) + api.recordTime(startTime, "AddReaction", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) RemoveReaction(reaction *model.Reaction) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RemoveReaction(reaction) + api.recordTime(startTime, "RemoveReaction", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetReactions(postId string) ([]*model.Reaction, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetReactions(postId) + api.recordTime(startTime, "GetReactions", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SendEphemeralPost(userID string, post *model.Post) *model.Post { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SendEphemeralPost(userID, post) + api.recordTime(startTime, "SendEphemeralPost", true) + return _returnsA +} + +func (api *apiTimerLayer) UpdateEphemeralPost(userID string, post *model.Post) *model.Post { + startTime := timePkg.Now() + _returnsA := api.apiImpl.UpdateEphemeralPost(userID, post) + api.recordTime(startTime, "UpdateEphemeralPost", true) + return _returnsA +} + +func (api *apiTimerLayer) DeleteEphemeralPost(userID, postId string) { + startTime := timePkg.Now() + api.apiImpl.DeleteEphemeralPost(userID, postId) + api.recordTime(startTime, "DeleteEphemeralPost", true) +} + +func (api *apiTimerLayer) DeletePost(postId string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeletePost(postId) + api.recordTime(startTime, "DeletePost", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetPostThread(postId string) (*model.PostList, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPostThread(postId) + api.recordTime(startTime, "GetPostThread", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPost(postId 
string) (*model.Post, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPost(postId) + api.recordTime(startTime, "GetPost", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPostsSince(channelId string, time int64) (*model.PostList, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPostsSince(channelId, time) + api.recordTime(startTime, "GetPostsSince", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPostsAfter(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPostsAfter(channelId, postId, page, perPage) + api.recordTime(startTime, "GetPostsAfter", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPostsBefore(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPostsBefore(channelId, postId, page, perPage) + api.recordTime(startTime, "GetPostsBefore", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPostsForChannel(channelId string, page, perPage int) (*model.PostList, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPostsForChannel(channelId, page, perPage) + api.recordTime(startTime, "GetPostsForChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetTeamStats(teamID string) (*model.TeamStats, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetTeamStats(teamID) + api.recordTime(startTime, "GetTeamStats", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdatePost(post *model.Post) (*model.Post, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdatePost(post) + 
api.recordTime(startTime, "UpdatePost", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetProfileImage(userID string) ([]byte, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetProfileImage(userID) + api.recordTime(startTime, "GetProfileImage", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SetProfileImage(userID string, data []byte) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SetProfileImage(userID, data) + api.recordTime(startTime, "SetProfileImage", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetEmojiList(sortBy string, page, perPage int) ([]*model.Emoji, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetEmojiList(sortBy, page, perPage) + api.recordTime(startTime, "GetEmojiList", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetEmojiByName(name string) (*model.Emoji, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetEmojiByName(name) + api.recordTime(startTime, "GetEmojiByName", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetEmoji(emojiId string) (*model.Emoji, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetEmoji(emojiId) + api.recordTime(startTime, "GetEmoji", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CopyFileInfos(userID string, fileIds []string) ([]string, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CopyFileInfos(userID, fileIds) + api.recordTime(startTime, "CopyFileInfos", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetFileInfo(fileId string) (*model.FileInfo, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetFileInfo(fileId) + api.recordTime(startTime, 
"GetFileInfo", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SetFileSearchableContent(fileID string, content string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SetFileSearchableContent(fileID, content) + api.recordTime(startTime, "SetFileSearchableContent", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetFileInfos(page, perPage, opt) + api.recordTime(startTime, "GetFileInfos", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetFile(fileId string) ([]byte, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetFile(fileId) + api.recordTime(startTime, "GetFile", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetFileLink(fileId string) (string, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetFileLink(fileId) + api.recordTime(startTime, "GetFileLink", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) ReadFile(path string) ([]byte, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.ReadFile(path) + api.recordTime(startTime, "ReadFile", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetEmojiImage(emojiId string) ([]byte, string, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB, _returnsC := api.apiImpl.GetEmojiImage(emojiId) + api.recordTime(startTime, "GetEmojiImage", _returnsC == nil) + return _returnsA, _returnsB, _returnsC +} + +func (api *apiTimerLayer) UploadFile(data []byte, channelId string, filename string) (*model.FileInfo, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UploadFile(data, channelId, filename) + 
api.recordTime(startTime, "UploadFile", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) OpenInteractiveDialog(dialog model.OpenDialogRequest) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.OpenInteractiveDialog(dialog) + api.recordTime(startTime, "OpenInteractiveDialog", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetPlugins() ([]*model.Manifest, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPlugins() + api.recordTime(startTime, "GetPlugins", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) EnablePlugin(id string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.EnablePlugin(id) + api.recordTime(startTime, "EnablePlugin", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) DisablePlugin(id string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DisablePlugin(id) + api.recordTime(startTime, "DisablePlugin", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) RemovePlugin(id string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RemovePlugin(id) + api.recordTime(startTime, "RemovePlugin", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetPluginStatus(id string) (*model.PluginStatus, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPluginStatus(id) + api.recordTime(startTime, "GetPluginStatus", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) InstallPlugin(file io.Reader, replace bool) (*model.Manifest, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.InstallPlugin(file, replace) + api.recordTime(startTime, "InstallPlugin", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) KVSet(key string, value []byte) *model.AppError { + startTime := 
timePkg.Now() + _returnsA := api.apiImpl.KVSet(key, value) + api.recordTime(startTime, "KVSet", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) KVCompareAndSet(key string, oldValue, newValue []byte) (bool, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.KVCompareAndSet(key, oldValue, newValue) + api.recordTime(startTime, "KVCompareAndSet", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) KVCompareAndDelete(key string, oldValue []byte) (bool, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.KVCompareAndDelete(key, oldValue) + api.recordTime(startTime, "KVCompareAndDelete", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) KVSetWithOptions(key string, value []byte, options model.PluginKVSetOptions) (bool, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.KVSetWithOptions(key, value, options) + api.recordTime(startTime, "KVSetWithOptions", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) KVSetWithExpiry(key string, value []byte, expireInSeconds int64) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.KVSetWithExpiry(key, value, expireInSeconds) + api.recordTime(startTime, "KVSetWithExpiry", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) KVGet(key string) ([]byte, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.KVGet(key) + api.recordTime(startTime, "KVGet", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) KVDelete(key string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.KVDelete(key) + api.recordTime(startTime, "KVDelete", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) KVDeleteAll() *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.KVDeleteAll() + 
api.recordTime(startTime, "KVDeleteAll", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) KVList(page, perPage int) ([]string, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.KVList(page, perPage) + api.recordTime(startTime, "KVList", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) PublishWebSocketEvent(event string, payload map[string]any, broadcast *model.WebsocketBroadcast) { + startTime := timePkg.Now() + api.apiImpl.PublishWebSocketEvent(event, payload, broadcast) + api.recordTime(startTime, "PublishWebSocketEvent", true) +} + +func (api *apiTimerLayer) HasPermissionTo(userID string, permission *model.Permission) bool { + startTime := timePkg.Now() + _returnsA := api.apiImpl.HasPermissionTo(userID, permission) + api.recordTime(startTime, "HasPermissionTo", true) + return _returnsA +} + +func (api *apiTimerLayer) HasPermissionToTeam(userID, teamID string, permission *model.Permission) bool { + startTime := timePkg.Now() + _returnsA := api.apiImpl.HasPermissionToTeam(userID, teamID, permission) + api.recordTime(startTime, "HasPermissionToTeam", true) + return _returnsA +} + +func (api *apiTimerLayer) HasPermissionToChannel(userID, channelId string, permission *model.Permission) bool { + startTime := timePkg.Now() + _returnsA := api.apiImpl.HasPermissionToChannel(userID, channelId, permission) + api.recordTime(startTime, "HasPermissionToChannel", true) + return _returnsA +} + +func (api *apiTimerLayer) RolesGrantPermission(roleNames []string, permissionId string) bool { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RolesGrantPermission(roleNames, permissionId) + api.recordTime(startTime, "RolesGrantPermission", true) + return _returnsA +} + +func (api *apiTimerLayer) LogDebug(msg string, keyValuePairs ...any) { + startTime := timePkg.Now() + api.apiImpl.LogDebug(msg, keyValuePairs...) 
+ api.recordTime(startTime, "LogDebug", true) +} + +func (api *apiTimerLayer) LogInfo(msg string, keyValuePairs ...any) { + startTime := timePkg.Now() + api.apiImpl.LogInfo(msg, keyValuePairs...) + api.recordTime(startTime, "LogInfo", true) +} + +func (api *apiTimerLayer) LogError(msg string, keyValuePairs ...any) { + startTime := timePkg.Now() + api.apiImpl.LogError(msg, keyValuePairs...) + api.recordTime(startTime, "LogError", true) +} + +func (api *apiTimerLayer) LogWarn(msg string, keyValuePairs ...any) { + startTime := timePkg.Now() + api.apiImpl.LogWarn(msg, keyValuePairs...) + api.recordTime(startTime, "LogWarn", true) +} + +func (api *apiTimerLayer) SendMail(to, subject, htmlBody string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SendMail(to, subject, htmlBody) + api.recordTime(startTime, "SendMail", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) CreateBot(bot *model.Bot) (*model.Bot, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateBot(bot) + api.recordTime(startTime, "CreateBot", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) PatchBot(botUserId string, botPatch *model.BotPatch) (*model.Bot, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.PatchBot(botUserId, botPatch) + api.recordTime(startTime, "PatchBot", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetBot(botUserId string, includeDeleted bool) (*model.Bot, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetBot(botUserId, includeDeleted) + api.recordTime(startTime, "GetBot", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetBots(options *model.BotGetOptions) ([]*model.Bot, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetBots(options) + api.recordTime(startTime, "GetBots", _returnsB == nil) + 
return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateBotActive(botUserId string, active bool) (*model.Bot, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateBotActive(botUserId, active) + api.recordTime(startTime, "UpdateBotActive", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) PermanentDeleteBot(botUserId string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.PermanentDeleteBot(botUserId) + api.recordTime(startTime, "PermanentDeleteBot", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) PluginHTTP(request *http.Request) *http.Response { + startTime := timePkg.Now() + _returnsA := api.apiImpl.PluginHTTP(request) + api.recordTime(startTime, "PluginHTTP", true) + return _returnsA +} + +func (api *apiTimerLayer) PublishUserTyping(userID, channelId, parentId string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.PublishUserTyping(userID, channelId, parentId) + api.recordTime(startTime, "PublishUserTyping", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) CreateCommand(cmd *model.Command) (*model.Command, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateCommand(cmd) + api.recordTime(startTime, "CreateCommand", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) ListCommands(teamID string) ([]*model.Command, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.ListCommands(teamID) + api.recordTime(startTime, "ListCommands", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) ListCustomCommands(teamID string) ([]*model.Command, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.ListCustomCommands(teamID) + api.recordTime(startTime, "ListCustomCommands", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) ListPluginCommands(teamID 
string) ([]*model.Command, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.ListPluginCommands(teamID) + api.recordTime(startTime, "ListPluginCommands", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) ListBuiltInCommands() ([]*model.Command, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.ListBuiltInCommands() + api.recordTime(startTime, "ListBuiltInCommands", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetCommand(commandID string) (*model.Command, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetCommand(commandID) + api.recordTime(startTime, "GetCommand", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateCommand(commandID string, updatedCmd *model.Command) (*model.Command, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateCommand(commandID, updatedCmd) + api.recordTime(startTime, "UpdateCommand", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteCommand(commandID string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeleteCommand(commandID) + api.recordTime(startTime, "DeleteCommand", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) CreateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateOAuthApp(app) + api.recordTime(startTime, "CreateOAuthApp", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetOAuthApp(appID string) (*model.OAuthApp, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetOAuthApp(appID) + api.recordTime(startTime, "GetOAuthApp", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) { + startTime := 
timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateOAuthApp(app) + api.recordTime(startTime, "UpdateOAuthApp", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteOAuthApp(appID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeleteOAuthApp(appID) + api.recordTime(startTime, "DeleteOAuthApp", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) PublishPluginClusterEvent(ev model.PluginClusterEvent, opts model.PluginClusterEventSendOptions) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.PublishPluginClusterEvent(ev, opts) + api.recordTime(startTime, "PublishPluginClusterEvent", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) RequestTrialLicense(requesterID string, users int, termsAccepted bool, receiveEmailsAccepted bool) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RequestTrialLicense(requesterID, users, termsAccepted, receiveEmailsAccepted) + api.recordTime(startTime, "RequestTrialLicense", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) GetCloudLimits() (*model.ProductLimits, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetCloudLimits() + api.recordTime(startTime, "GetCloudLimits", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) EnsureBotUser(bot *model.Bot) (string, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.EnsureBotUser(bot) + api.recordTime(startTime, "EnsureBotUser", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) RegisterCollectionAndTopic(collectionType, topicType string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.RegisterCollectionAndTopic(collectionType, topicType) + api.recordTime(startTime, "RegisterCollectionAndTopic", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) CreateUploadSession(us 
*model.UploadSession) (*model.UploadSession, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateUploadSession(us) + api.recordTime(startTime, "CreateUploadSession", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UploadData(us, rd) + api.recordTime(startTime, "UploadData", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetUploadSession(uploadID string) (*model.UploadSession, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetUploadSession(uploadID) + api.recordTime(startTime, "GetUploadSession", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) SendPushNotification(notification *model.PushNotification, userID string) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SendPushNotification(notification, userID) + api.recordTime(startTime, "SendPushNotification", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) UpdateUserAuth(userID string, userAuth *model.UserAuth) (*model.UserAuth, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateUserAuth(userID, userAuth) + api.recordTime(startTime, "UpdateUserAuth", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) RegisterPluginForSharedChannels(opts model.RegisterPluginOpts) (remoteID string, err error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.RegisterPluginForSharedChannels(opts) + api.recordTime(startTime, "RegisterPluginForSharedChannels", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UnregisterPluginForSharedChannels(pluginID string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.UnregisterPluginForSharedChannels(pluginID) + 
api.recordTime(startTime, "UnregisterPluginForSharedChannels", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) ShareChannel(sc *model.SharedChannel) (*model.SharedChannel, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.ShareChannel(sc) + api.recordTime(startTime, "ShareChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateSharedChannel(sc *model.SharedChannel) (*model.SharedChannel, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateSharedChannel(sc) + api.recordTime(startTime, "UpdateSharedChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UnshareChannel(channelID string) (unshared bool, err error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UnshareChannel(channelID) + api.recordTime(startTime, "UnshareChannel", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateSharedChannelCursor(channelID, remoteID string, cusror model.GetPostsSinceForSyncCursor) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.UpdateSharedChannelCursor(channelID, remoteID, cusror) + api.recordTime(startTime, "UpdateSharedChannelCursor", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) SyncSharedChannel(channelID string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.SyncSharedChannel(channelID) + api.recordTime(startTime, "SyncSharedChannel", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) InviteRemoteToChannel(channelID string, remoteID string, userID string, shareIfNotShared bool) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.InviteRemoteToChannel(channelID, remoteID, userID, shareIfNotShared) + api.recordTime(startTime, "InviteRemoteToChannel", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) UninviteRemoteFromChannel(channelID string, remoteID string) error { + 
startTime := timePkg.Now() + _returnsA := api.apiImpl.UninviteRemoteFromChannel(channelID, remoteID) + api.recordTime(startTime, "UninviteRemoteFromChannel", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) UpsertGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpsertGroupMember(groupID, userID) + api.recordTime(startTime, "UpsertGroupMember", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpsertGroupMembers(groupID string, userIDs []string) ([]*model.GroupMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpsertGroupMembers(groupID, userIDs) + api.recordTime(startTime, "UpsertGroupMembers", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupByRemoteID(remoteID, groupSource) + api.recordTime(startTime, "GetGroupByRemoteID", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateGroup(group *model.Group) (*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreateGroup(group) + api.recordTime(startTime, "CreateGroup", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateGroup(group *model.Group) (*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateGroup(group) + api.recordTime(startTime, "UpdateGroup", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteGroup(groupID string) (*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.DeleteGroup(groupID) + api.recordTime(startTime, "DeleteGroup", _returnsB == nil) + return _returnsA, 
_returnsB +} + +func (api *apiTimerLayer) RestoreGroup(groupID string) (*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.RestoreGroup(groupID) + api.recordTime(startTime, "RestoreGroup", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeleteGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.DeleteGroupMember(groupID, userID) + api.recordTime(startTime, "DeleteGroupMember", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupSyncable(groupID, syncableID, syncableType) + api.recordTime(startTime, "GetGroupSyncable", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetGroupSyncables(groupID string, syncableType model.GroupSyncableType) ([]*model.GroupSyncable, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroupSyncables(groupID, syncableType) + api.recordTime(startTime, "GetGroupSyncables", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpsertGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpsertGroupSyncable(groupSyncable) + api.recordTime(startTime, "UpsertGroupSyncable", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateGroupSyncable(groupSyncable) + api.recordTime(startTime, "UpdateGroupSyncable", _returnsB == nil) + return _returnsA, _returnsB 
+} + +func (api *apiTimerLayer) DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.DeleteGroupSyncable(groupID, syncableID, syncableType) + api.recordTime(startTime, "DeleteGroupSyncable", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdateUserRoles(userID, newRoles string) (*model.User, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdateUserRoles(userID, newRoles) + api.recordTime(startTime, "UpdateUserRoles", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPluginID() string { + startTime := timePkg.Now() + _returnsA := api.apiImpl.GetPluginID() + api.recordTime(startTime, "GetPluginID", true) + return _returnsA +} + +func (api *apiTimerLayer) GetGroups(page, perPage int, opts model.GroupSearchOpts, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetGroups(page, perPage, opts, viewRestrictions) + api.recordTime(startTime, "GetGroups", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreateDefaultSyncableMemberships(params model.CreateDefaultMembershipParams) *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.CreateDefaultSyncableMemberships(params) + api.recordTime(startTime, "CreateDefaultSyncableMemberships", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) DeleteGroupConstrainedMemberships() *model.AppError { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeleteGroupConstrainedMemberships() + api.recordTime(startTime, "DeleteGroupConstrainedMemberships", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) CreatePropertyField(field *model.PropertyField) (*model.PropertyField, error) { + startTime := 
timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreatePropertyField(field) + api.recordTime(startTime, "CreatePropertyField", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPropertyField(groupID, fieldID string) (*model.PropertyField, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPropertyField(groupID, fieldID) + api.recordTime(startTime, "GetPropertyField", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPropertyFields(groupID string, ids []string) ([]*model.PropertyField, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPropertyFields(groupID, ids) + api.recordTime(startTime, "GetPropertyFields", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdatePropertyField(groupID string, field *model.PropertyField) (*model.PropertyField, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdatePropertyField(groupID, field) + api.recordTime(startTime, "UpdatePropertyField", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeletePropertyField(groupID, fieldID string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeletePropertyField(groupID, fieldID) + api.recordTime(startTime, "DeletePropertyField", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) SearchPropertyFields(groupID string, opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SearchPropertyFields(groupID, opts) + api.recordTime(startTime, "SearchPropertyFields", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CountPropertyFields(groupID string, includeDeleted bool) (int64, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CountPropertyFields(groupID, includeDeleted) + api.recordTime(startTime, "CountPropertyFields", 
_returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CountPropertyFieldsForTarget(groupID, targetType, targetID string, includeDeleted bool) (int64, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CountPropertyFieldsForTarget(groupID, targetType, targetID, includeDeleted) + api.recordTime(startTime, "CountPropertyFieldsForTarget", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) CreatePropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.CreatePropertyValue(value) + api.recordTime(startTime, "CreatePropertyValue", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPropertyValue(groupID, valueID string) (*model.PropertyValue, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPropertyValue(groupID, valueID) + api.recordTime(startTime, "GetPropertyValue", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPropertyValues(groupID string, ids []string) ([]*model.PropertyValue, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPropertyValues(groupID, ids) + api.recordTime(startTime, "GetPropertyValues", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdatePropertyValue(groupID string, value *model.PropertyValue) (*model.PropertyValue, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdatePropertyValue(groupID, value) + api.recordTime(startTime, "UpdatePropertyValue", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpsertPropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpsertPropertyValue(value) + api.recordTime(startTime, "UpsertPropertyValue", _returnsB == nil) + return _returnsA, _returnsB +} + 
+func (api *apiTimerLayer) DeletePropertyValue(groupID, valueID string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeletePropertyValue(groupID, valueID) + api.recordTime(startTime, "DeletePropertyValue", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) SearchPropertyValues(groupID string, opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.SearchPropertyValues(groupID, opts) + api.recordTime(startTime, "SearchPropertyValues", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) RegisterPropertyGroup(name string) (*model.PropertyGroup, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.RegisterPropertyGroup(name) + api.recordTime(startTime, "RegisterPropertyGroup", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPropertyGroup(name string) (*model.PropertyGroup, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPropertyGroup(name) + api.recordTime(startTime, "GetPropertyGroup", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) GetPropertyFieldByName(groupID, targetID, name string) (*model.PropertyField, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.GetPropertyFieldByName(groupID, targetID, name) + api.recordTime(startTime, "GetPropertyFieldByName", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdatePropertyFields(groupID string, fields []*model.PropertyField) ([]*model.PropertyField, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdatePropertyFields(groupID, fields) + api.recordTime(startTime, "UpdatePropertyFields", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpdatePropertyValues(groupID string, values []*model.PropertyValue) ([]*model.PropertyValue, error) { + startTime 
:= timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpdatePropertyValues(groupID, values) + api.recordTime(startTime, "UpdatePropertyValues", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) UpsertPropertyValues(values []*model.PropertyValue) ([]*model.PropertyValue, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := api.apiImpl.UpsertPropertyValues(values) + api.recordTime(startTime, "UpsertPropertyValues", _returnsB == nil) + return _returnsA, _returnsB +} + +func (api *apiTimerLayer) DeletePropertyValuesForTarget(groupID, targetType, targetID string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeletePropertyValuesForTarget(groupID, targetType, targetID) + api.recordTime(startTime, "DeletePropertyValuesForTarget", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) DeletePropertyValuesForField(groupID, fieldID string) error { + startTime := timePkg.Now() + _returnsA := api.apiImpl.DeletePropertyValuesForField(groupID, fieldID) + api.recordTime(startTime, "DeletePropertyValuesForField", _returnsA == nil) + return _returnsA +} + +func (api *apiTimerLayer) LogAuditRec(rec *model.AuditRecord) { + startTime := timePkg.Now() + api.apiImpl.LogAuditRec(rec) + api.recordTime(startTime, "LogAuditRec", true) +} + +func (api *apiTimerLayer) LogAuditRecWithLevel(rec *model.AuditRecord, level mlog.Level) { + startTime := timePkg.Now() + api.apiImpl.LogAuditRecWithLevel(rec, level) + api.recordTime(startTime, "LogAuditRecWithLevel", true) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/audit.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/audit.go new file mode 100644 index 00000000..b117efaa --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/audit.go @@ -0,0 +1,70 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package plugin + +import ( + "encoding/json" + + "github.com/mattermost/mattermost/server/public/model" +) + +// MakeAuditRecord creates a new audit record with basic information for plugin use. +// This function creates a minimal audit record that can be populated with additional data. +// Use this when you don't have access to request context or want to manually populate fields. +func MakeAuditRecord(event string, initialStatus string) *model.AuditRecord { + return &model.AuditRecord{ + EventName: event, + Status: initialStatus, + Meta: make(map[string]any), + Actor: model.AuditEventActor{ + UserId: "", + SessionId: "", + Client: "", + IpAddress: "", + XForwardedFor: "", + }, + EventData: model.AuditEventData{ + Parameters: map[string]any{}, + PriorState: make(map[string]any), + ResultState: make(map[string]any), + ObjectType: "", + }, + } +} + +// MakeAuditRecordWithContext creates a new audit record populated with plugin context information. +// This is the recommended way for plugins to create audit records when they have request context. +// The Context should come from plugin hook parameters or HTTP request handlers. +func MakeAuditRecordWithContext(event string, initialStatus string, ctx *Context, userId, apiPath string) *model.AuditRecord { + rec := MakeAuditRecord(event, initialStatus) + rec.AddMeta(model.AuditKeyAPIPath, apiPath) + rec.Actor.UserId = userId + rec.Actor.SessionId = ctx.SessionId + rec.Actor.Client = ctx.UserAgent + rec.Actor.IpAddress = ctx.IPAddress + return rec +} + +func makeAuditRecordGobSafe(record model.AuditRecord) model.AuditRecord { + record.EventData.Parameters = makeMapGobSafe(record.EventData.Parameters) + record.EventData.PriorState = makeMapGobSafe(record.EventData.PriorState) + record.EventData.ResultState = makeMapGobSafe(record.EventData.ResultState) + record.Meta = makeMapGobSafe(record.Meta) + return record +} + +// makeMapGobSafe converts map data to a gob-safe representation via JSON round-trip. 
+// This eliminates problematic types like nil pointers in interfaces that cause gob +// encoding to fail when sending audit data over RPC via the plugin API. +func makeMapGobSafe(m map[string]any) map[string]any { + jsonBytes, err := json.Marshal(m) + if err != nil { + return map[string]any{"error": "failed to serialize audit data"} + } + var gobSafe map[string]any + if err := json.Unmarshal(jsonBytes, &gobSafe); err != nil { + return map[string]any{"error": "failed to deserialize audit data"} + } + return gobSafe +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/client.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/client.go new file mode 100644 index 00000000..56b42f30 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/client.go @@ -0,0 +1,106 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "context" + + "github.com/hashicorp/go-plugin" +) + +const ( + InternalKeyPrefix = "mmi_" + BotUserKey = InternalKeyPrefix + "botid" +) + +// WithTestContext provides a context typically used to terminate a plugin from a unit test. +func WithTestContext(ctx context.Context) func(*plugin.ServeConfig) error { + return func(config *plugin.ServeConfig) error { + if config.Test == nil { + config.Test = &plugin.ServeTestConfig{} + } + + config.Test.Context = ctx + + return nil + } +} + +// WithTestReattachConfigCh configures the channel to receive the ReattachConfig used to reattach +// an externally launched plugin instance with the Mattermost server. 
+func WithTestReattachConfigCh(reattachConfigCh chan<- *plugin.ReattachConfig) func(*plugin.ServeConfig) error { + return func(config *plugin.ServeConfig) error { + if config.Test == nil { + config.Test = &plugin.ServeTestConfig{} + } + + config.Test.ReattachConfigCh = reattachConfigCh + + return nil + } +} + +// WithTestCloseCh provides a channel that signals when the plugin exits. +func WithTestCloseCh(closeCh chan<- struct{}) func(*plugin.ServeConfig) error { + return func(config *plugin.ServeConfig) error { + if config.Test == nil { + config.Test = &plugin.ServeTestConfig{} + } + + config.Test.CloseCh = closeCh + + return nil + } +} + +// Starts the serving of a Mattermost plugin over net/rpc. gRPC is not supported. +// +// Call this when your plugin is ready to start. Options allow configuring plugins for testing +// scenarios. +func ClientMain(pluginImplementation any, opts ...func(config *plugin.ServeConfig) error) { + impl, ok := pluginImplementation.(interface { + SetAPI(api API) + SetDriver(driver Driver) + }) + if !ok { + panic("Plugin implementation given must embed plugin.MattermostPlugin") + } + impl.SetAPI(nil) + impl.SetDriver(nil) + + pluginMap := map[string]plugin.Plugin{ + "hooks": &hooksPlugin{hooks: pluginImplementation}, + } + + serveConfig := &plugin.ServeConfig{ + HandshakeConfig: handshake, + Plugins: pluginMap, + } + + for _, opt := range opts { + err := opt(serveConfig) + if err != nil { + panic("failed to start serving plugin: " + err.Error()) + } + } + + plugin.Serve(serveConfig) +} + +type MattermostPlugin struct { + // API exposes the plugin api, and becomes available just prior to the OnActive hook. + API API + Driver Driver +} + +// SetAPI persists the given API interface to the plugin. It is invoked just prior to the +// OnActivate hook, exposing the API for use by the plugin. +func (p *MattermostPlugin) SetAPI(api API) { + p.API = api +} + +// SetDriver sets the RPC client implementation to talk with the server. 
+func (p *MattermostPlugin) SetDriver(driver Driver) { + p.Driver = driver +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/client_rpc.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/client_rpc.go new file mode 100644 index 00000000..edbd3c04 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/client_rpc.go @@ -0,0 +1,1309 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +//go:generate go run interface_generator/main.go + +package plugin + +import ( + "bytes" + "database/sql" + "database/sql/driver" + "encoding/gob" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "net/rpc" + "net/url" + "os" + "reflect" + "sync" + + "github.com/hashicorp/go-plugin" + "github.com/lib/pq" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +var hookNameToId = make(map[string]int) + +type hooksRPCClient struct { + client *rpc.Client + log *mlog.Logger + muxBroker *plugin.MuxBroker + apiImpl API + driver Driver + implemented [TotalHooksID]bool + doneWg sync.WaitGroup +} + +type hooksRPCServer struct { + impl any + muxBroker *plugin.MuxBroker + apiRPCClient *apiRPCClient +} + +// Implements hashicorp/go-plugin/plugin.Plugin interface to connect the hooks of a plugin +type hooksPlugin struct { + hooks any + apiImpl API + driverImpl Driver + log *mlog.Logger +} + +func (p *hooksPlugin) Server(b *plugin.MuxBroker) (any, error) { + return &hooksRPCServer{impl: p.hooks, muxBroker: b}, nil +} + +func (p *hooksPlugin) Client(b *plugin.MuxBroker, client *rpc.Client) (any, error) { + return &hooksRPCClient{ + client: client, + log: p.log, + muxBroker: b, + apiImpl: p.apiImpl, + driver: p.driverImpl, + }, nil +} + +type apiRPCClient struct { + client *rpc.Client + muxBroker *plugin.MuxBroker +} + +type apiRPCServer struct { + impl API + muxBroker *plugin.MuxBroker +} + +// 
ErrorString is a fallback for sending unregistered implementations of the error interface across +// rpc. For example, the errorString type from the github.com/pkg/errors package cannot be +// registered since it is not exported, but this precludes common error handling paradigms. +// ErrorString merely preserves the string description of the error, while satisfying the error +// interface itself to allow other registered types (such as model.AppError) to be sent unmodified. +type ErrorString struct { + Code int // Code to map to various error variables + Err string +} + +func (e ErrorString) Error() string { + return e.Err +} + +func encodableError(err error) error { + if err == nil { + return nil + } + if _, ok := err.(*model.AppError); ok { + return err + } + + if _, ok := err.(*pq.Error); ok { + return err + } + + ret := &ErrorString{ + Err: err.Error(), + } + + switch err { + case io.EOF: + ret.Code = 1 + case sql.ErrNoRows: + ret.Code = 2 + case sql.ErrConnDone: + ret.Code = 3 + case sql.ErrTxDone: + ret.Code = 4 + case driver.ErrSkip: + ret.Code = 5 + case driver.ErrBadConn: + ret.Code = 6 + case driver.ErrRemoveArgument: + ret.Code = 7 + } + + return ret +} + +func decodableError(err error) error { + if encErr, ok := err.(*ErrorString); ok { + switch encErr.Code { + case 1: + return io.EOF + case 2: + return sql.ErrNoRows + case 3: + return sql.ErrConnDone + case 4: + return sql.ErrTxDone + case 5: + return driver.ErrSkip + case 6: + return driver.ErrBadConn + case 7: + return driver.ErrRemoveArgument + } + } + return err +} + +// Registering some types used by MM for encoding/gob used by rpc +func init() { + gob.Register([]*model.SlackAttachment{}) + gob.Register([]any{}) + gob.Register(map[string]any{}) + gob.Register(&model.AppError{}) + gob.Register(&pq.Error{}) + gob.Register(&ErrorString{}) + gob.Register(&model.AutocompleteDynamicListArg{}) + gob.Register(&model.AutocompleteStaticListArg{}) + gob.Register(&model.AutocompleteTextArg{}) + 
gob.Register(&model.PreviewPost{}) + gob.Register(model.PropertyOptions[*model.PluginPropertyOption]{}) +} + +// These enforce compile time checks to make sure types implement the interface +// If you are getting an error here, you probably need to run `make pluginapi` to +// autogenerate RPC glue code +var ( + _ plugin.Plugin = &hooksPlugin{} + _ Hooks = &hooksRPCClient{} +) + +// +// Below are special cases for hooks or APIs that can not be auto generated +// + +func (g *hooksRPCClient) Implemented() (impl []string, err error) { + err = g.client.Call("Plugin.Implemented", struct{}{}, &impl) + for _, hookName := range impl { + if hookId, ok := hookNameToId[hookName]; ok { + g.implemented[hookId] = true + } + } + return +} + +// Implemented replies with the names of the hooks that are implemented. +func (s *hooksRPCServer) Implemented(args struct{}, reply *[]string) error { + ifaceType := reflect.TypeFor[Hooks]() + implType := reflect.TypeOf(s.impl) + selfType := reflect.TypeFor[*hooksRPCServer]() + var methods []string + for i := 0; i < ifaceType.NumMethod(); i++ { + method := ifaceType.Method(i) + m, ok := implType.MethodByName(method.Name) + if !ok { + continue + } else if m.Type.NumIn() != method.Type.NumIn()+1 { + continue + } else if m.Type.NumOut() != method.Type.NumOut() { + continue + } + match := true + for j := 0; j < method.Type.NumIn(); j++ { + if m.Type.In(j+1) != method.Type.In(j) { + match = false + break + } + } + for j := 0; j < method.Type.NumOut(); j++ { + if m.Type.Out(j) != method.Type.Out(j) { + match = false + break + } + } + if !match { + continue + } + if _, ok := selfType.MethodByName(method.Name); !ok { + continue + } + methods = append(methods, method.Name) + } + *reply = methods + return encodableError(nil) +} + +type Z_OnActivateArgs struct { + APIMuxId uint32 + DriverMuxId uint32 +} + +type Z_OnActivateReturns struct { + A error +} + +func (g *hooksRPCClient) OnActivate() error { + muxId := g.muxBroker.NextId() + g.doneWg.Add(1) + go 
func() { + defer g.doneWg.Done() + g.muxBroker.AcceptAndServe(muxId, &apiRPCServer{ + impl: g.apiImpl, + muxBroker: g.muxBroker, + }) + }() + + nextID := g.muxBroker.NextId() + g.doneWg.Add(1) + go func() { + defer g.doneWg.Done() + g.muxBroker.AcceptAndServe(nextID, &dbRPCServer{ + dbImpl: g.driver, + }) + }() + + _args := &Z_OnActivateArgs{ + APIMuxId: muxId, + DriverMuxId: nextID, + } + _returns := &Z_OnActivateReturns{} + + if err := g.client.Call("Plugin.OnActivate", _args, _returns); err != nil { + g.log.Error("RPC call to OnActivate plugin failed.", mlog.Err(err)) + } + return _returns.A +} + +func (s *hooksRPCServer) OnActivate(args *Z_OnActivateArgs, returns *Z_OnActivateReturns) error { + connection, err := s.muxBroker.Dial(args.APIMuxId) + if err != nil { + return err + } + + conn2, err := s.muxBroker.Dial(args.DriverMuxId) + if err != nil { + return err + } + + s.apiRPCClient = &apiRPCClient{ + client: rpc.NewClient(connection), + muxBroker: s.muxBroker, + } + + dbClient := &dbRPCClient{ + client: rpc.NewClient(conn2), + } + + if mmplugin, ok := s.impl.(interface { + SetAPI(api API) + SetDriver(driver Driver) + }); ok { + mmplugin.SetAPI(s.apiRPCClient) + mmplugin.SetDriver(dbClient) + } + + if mmplugin, ok := s.impl.(interface { + OnConfigurationChange() error + }); ok { + if err := mmplugin.OnConfigurationChange(); err != nil { + fmt.Fprintf(os.Stderr, "[ERROR] call to OnConfigurationChange failed, error: %v", err.Error()) + } + } + + // Capture output of standard logger because go-plugin + // redirects it. 
+ log.SetOutput(os.Stderr) + + if hook, ok := s.impl.(interface { + OnActivate() error + }); ok { + returns.A = encodableError(hook.OnActivate()) + } + return nil +} + +type Z_LoadPluginConfigurationArgsArgs struct{} + +type Z_LoadPluginConfigurationArgsReturns struct { + A []byte +} + +func (g *apiRPCClient) LoadPluginConfiguration(dest any) error { + _args := &Z_LoadPluginConfigurationArgsArgs{} + _returns := &Z_LoadPluginConfigurationArgsReturns{} + if err := g.client.Call("Plugin.LoadPluginConfiguration", _args, _returns); err != nil { + log.Printf("RPC call to LoadPluginConfiguration API failed: %s", err.Error()) + } + if err := json.Unmarshal(_returns.A, dest); err != nil { + log.Printf("LoadPluginConfiguration API failed to unmarshal: %s", err.Error()) + } + return nil +} + +func (s *apiRPCServer) LoadPluginConfiguration(args *Z_LoadPluginConfigurationArgsArgs, returns *Z_LoadPluginConfigurationArgsReturns) error { + var config any + if hook, ok := s.impl.(interface { + LoadPluginConfiguration(dest any) error + }); ok { + if err := hook.LoadPluginConfiguration(&config); err != nil { + return err + } + } + b, err := json.Marshal(config) + if err != nil { + return err + } + returns.A = b + return nil +} + +func init() { + hookNameToId["ServeHTTP"] = ServeHTTPID +} + +// Using a subset of http.Request prevents a known incompatibility when decoding Go v1.23+ gob-encoded x509.Certificate +// structs from Go v1.22 compiled plugins. These come from http.Request.TLS field (*tls.ConnectionState). 
// HTTPRequestSubset carries the gob-encodable fields of an http.Request across
// the plugin RPC boundary. Non-encodable state (notably http.Request.TLS, per
// the note above) is intentionally omitted; the body travels separately over a
// MuxBroker stream rather than inside the RPC payload.
type HTTPRequestSubset struct {
	Method     string
	URL        *url.URL
	Proto      string
	ProtoMajor int
	ProtoMinor int
	Header     http.Header
	Host       string
	RemoteAddr string
	RequestURI string
	Body       io.ReadCloser
}

// GetHTTPRequest reconstructs an *http.Request from the subset. Fields not
// carried by HTTPRequestSubset are left at their zero values.
func (r *HTTPRequestSubset) GetHTTPRequest() *http.Request {
	return &http.Request{
		Method:     r.Method,
		URL:        r.URL,
		Proto:      r.Proto,
		ProtoMajor: r.ProtoMajor,
		ProtoMinor: r.ProtoMinor,
		Header:     r.Header,
		Host:       r.Host,
		RemoteAddr: r.RemoteAddr,
		RequestURI: r.RequestURI,
		Body:       r.Body,
	}
}

// Z_ServeHTTPArgs is the RPC payload for the ServeHTTP hook. The response
// writer and request body are exchanged over MuxBroker streams identified by
// the two stream IDs; a RequestBodyStream of 0 means "no body".
type Z_ServeHTTPArgs struct {
	ResponseWriterStream uint32
	Request              *HTTPRequestSubset
	Context              *Context
	RequestBodyStream    uint32
}

// ServeHTTP forwards an HTTP request to the plugin over RPC. Only request
// metadata travels in the RPC call; the response writer and (if present) the
// request body are each served to the plugin over dedicated MuxBroker streams.
func (g *hooksRPCClient) ServeHTTP(c *Context, w http.ResponseWriter, r *http.Request) {
	if !g.implemented[ServeHTTPID] {
		http.NotFound(w, r)
		return
	}

	// Stream over which the plugin writes the HTTP response back. The RPC
	// server registered here exposes the local ResponseWriter to the plugin.
	serveHTTPStreamId := g.muxBroker.NextId()
	go func() {
		connection, err := g.muxBroker.Accept(serveHTTPStreamId)
		if err != nil {
			g.log.Error("Plugin failed to ServeHTTP, muxBroker couldn't accept connection", mlog.Uint("serve_http_stream_id", serveHTTPStreamId), mlog.Err(err))
			return
		}
		defer connection.Close()

		rpcServer := rpc.NewServer()
		if err := rpcServer.RegisterName("Plugin", &httpResponseWriterRPCServer{w: w, log: g.log}); err != nil {
			g.log.Error("Plugin failed to ServeHTTP, couldn't register RPC name", mlog.Err(err))
			return
		}
		rpcServer.ServeConn(connection)
	}()

	// Stream the request body, if any; 0 signals "no body" to the plugin side.
	requestBodyStreamId := uint32(0)
	if r.Body != nil {
		requestBodyStreamId = g.muxBroker.NextId()
		go func() {
			bodyConnection, err := g.muxBroker.Accept(requestBodyStreamId)
			if err != nil {
				g.log.Error("Plugin failed to ServeHTTP, muxBroker couldn't Accept request body connection", mlog.Err(err))
				return
			}
			defer bodyConnection.Close()
			serveIOReader(r.Body, bodyConnection)
		}()
	}

	// Body is deliberately left nil here; it is delivered via RequestBodyStream.
	forwardedRequest := &HTTPRequestSubset{
		Method:     r.Method,
		URL:        r.URL,
		Proto:      r.Proto,
		ProtoMajor: r.ProtoMajor,
		ProtoMinor: r.ProtoMinor,
		Header:     r.Header,
		Host:       r.Host,
		RemoteAddr: r.RemoteAddr,
		RequestURI: r.RequestURI,
	}

	if err := g.client.Call("Plugin.ServeHTTP", Z_ServeHTTPArgs{
		Context:              c,
		ResponseWriterStream: serveHTTPStreamId,
		Request:              forwardedRequest,
		RequestBodyStream:    requestBodyStreamId,
	}, nil); err != nil {
		g.log.Error("Plugin failed to ServeHTTP, RPC call failed", mlog.Err(err))
		http.Error(w, "500 internal server error", http.StatusInternalServerError)
	}
}

// ServeHTTP (plugin side) dials back the response-writer and request-body
// streams set up by the host, reconstructs the http.Request, and dispatches to
// the plugin's ServeHTTP implementation if it has one.
func (s *hooksRPCServer) ServeHTTP(args *Z_ServeHTTPArgs, returns *struct{}) error {
	connection, err := s.muxBroker.Dial(args.ResponseWriterStream)
	if err != nil {
		fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote response writer stream, error: %v", err.Error())
		return err
	}
	w := connectHTTPResponseWriter(connection)
	defer w.Close()

	r := args.Request
	if args.RequestBodyStream != 0 {
		connection, err := s.muxBroker.Dial(args.RequestBodyStream)
		if err != nil {
			fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote request body stream, error: %v", err.Error())
			return err
		}
		r.Body = connectIOReader(connection)
	} else {
		// No body was sent; give handlers a non-nil, empty body.
		r.Body = io.NopCloser(&bytes.Buffer{})
	}
	defer r.Body.Close()

	httpReq := r.GetHTTPRequest()

	if hook, ok := s.impl.(interface {
		ServeHTTP(c *Context, w http.ResponseWriter, r *http.Request)
	}); ok {
		hook.ServeHTTP(args.Context, w, httpReq)
	} else {
		http.NotFound(w, httpReq)
	}

	return nil
}

// PluginHTTPStream - Streaming version of PluginHTTP that uses MuxBroker for streaming request/response bodies.
// This avoids buffering large payloads in memory.
// Legacy buffered structs (kept for backward compatibility with old servers)
type Z_PluginHTTPArgs struct {
	Request     *HTTPRequestSubset
	RequestBody []byte
}

type Z_PluginHTTPReturns struct {
	Response     *http.Response
	ResponseBody []byte
}

// New streaming structs
type Z_PluginHTTPStreamArgs struct {
	ResponseBodyStream uint32
	Request            *HTTPRequestSubset
	RequestBodyStream  uint32
}

type Z_PluginHTTPStreamReturns struct {
	StatusCode int
	Header     http.Header
}

// PluginHTTP performs an inter-plugin HTTP request via the host. It prefers
// the streaming transport and transparently falls back to the legacy buffered
// transport when the host does not implement PluginHTTPStream.
func (g *apiRPCClient) PluginHTTP(request *http.Request) *http.Response {
	// Try to use the streaming version first (if server supports it)
	// Fall back to buffered version if not available (signaled by nil)
	response, err := g.pluginHTTPStream(request)
	if err != nil {
		// If we error for some other reason other than stream not being
		// implemented just report and fail
		log.Print(err.Error())
		return nil
	}
	if response != nil {
		return response
	}

	// Fallback to buffered version
	return g.pluginHTTPBuffered(request)
}

// pluginHTTPStream attempts to use the new streaming endpoint.
// It returns (nil, nil) when the host does not implement
// Plugin.PluginHTTPStream, signalling the caller to fall back.
func (g *apiRPCClient) pluginHTTPStream(request *http.Request) (*http.Response, error) {
	// Set up request body stream
	requestBodyStreamId := uint32(0)
	if request.Body != nil {
		requestBodyStreamId = g.muxBroker.NextId()
		go func() {
			bodyConnection, err := g.muxBroker.Accept(requestBodyStreamId)
			if err != nil {
				log.Printf("Plugin failed to accept request body connection for PluginHTTPStream: %s", err.Error())
				return
			}
			defer bodyConnection.Close()
			serveIOReader(request.Body, bodyConnection)
		}()
	}

	// Set up response body stream. The channel hands the connected reader (or
	// nil on Accept failure) back to this goroutine after the RPC call.
	responseBodyStreamId := g.muxBroker.NextId()
	responsePipe := make(chan io.ReadCloser, 1)

	go func() {
		connection, err := g.muxBroker.Accept(responseBodyStreamId)
		if err != nil {
			log.Printf("Plugin failed to accept response body connection for PluginHTTPStream: %s", err.Error())
			responsePipe <- nil
			return
		}
		// Don't close connection here - it will be closed when response body is read
		responsePipe <- connectIOReader(connection)
	}()

	// Body travels over the stream, so it is omitted from the subset.
	forwardedRequest := &HTTPRequestSubset{
		Method:     request.Method,
		URL:        request.URL,
		Proto:      request.Proto,
		ProtoMajor: request.ProtoMajor,
		ProtoMinor: request.ProtoMinor,
		Header:     request.Header,
		Host:       request.Host,
		RemoteAddr: request.RemoteAddr,
		RequestURI: request.RequestURI,
	}

	_args := &Z_PluginHTTPStreamArgs{
		ResponseBodyStream: responseBodyStreamId,
		Request:            forwardedRequest,
		RequestBodyStream:  requestBodyStreamId,
	}

	_returns := &Z_PluginHTTPStreamReturns{}
	if err := g.client.Call("Plugin.PluginHTTPStream", _args, _returns); err != nil {
		// If the method doesn't exist, return nil to trigger fallback
		if err.Error() == "rpc: can't find method Plugin.PluginHTTPStream" {
			return nil, nil
		}
		return nil, fmt.Errorf("RPC call to PluginHTTPStream API failed: %w", err)
	}

	// Wait for response body reader
	responseBody := <-responsePipe
	if responseBody == nil {
		return nil, fmt.Errorf("Failed to get response body stream for PluginHTTPStream")
	}

	// Create response with streamed body
	response := &http.Response{
		StatusCode: _returns.StatusCode,
		Header:     _returns.Header,
		Body:       responseBody,
		Proto:      request.Proto,
		ProtoMajor: request.ProtoMajor,
		ProtoMinor: request.ProtoMinor,
	}

	return response, nil
}

// pluginHTTPBuffered is the original buffered implementation: the whole
// request body is read into memory and shipped in the RPC payload, and the
// whole response body comes back the same way.
func (g *apiRPCClient) pluginHTTPBuffered(request *http.Request) *http.Response {
	forwardedRequest := &HTTPRequestSubset{
		Method:     request.Method,
		URL:        request.URL,
		Proto:      request.Proto,
		ProtoMajor: request.ProtoMajor,
		ProtoMinor: request.ProtoMinor,
		Header:     request.Header,
		Host:       request.Host,
		RemoteAddr: request.RemoteAddr,
		RequestURI: request.RequestURI,
	}

	_args := &Z_PluginHTTPArgs{
		Request: forwardedRequest,
	}

	if request.Body != nil {
		requestBody, err := io.ReadAll(request.Body)
		if err != nil {
			log.Printf("RPC call to PluginHTTP API failed: %s", err.Error())
			return nil
		}
		request.Body.Close()
		request.Body = nil

		_args.RequestBody = requestBody
	}

	_returns := &Z_PluginHTTPReturns{}
	if err := g.client.Call("Plugin.PluginHTTP", _args, _returns); err != nil {
		log.Printf("RPC call to PluginHTTP API failed: %s", err.Error())
		return nil
	}

	_returns.Response.Body = io.NopCloser(bytes.NewBuffer(_returns.ResponseBody))

	return _returns.Response
}

// PluginHTTPStream is the host-side handler for the streaming transport: it
// dials the caller's body streams, invokes the real PluginHTTP implementation,
// and streams the response body back asynchronously.
func (s *apiRPCServer) PluginHTTPStream(args *Z_PluginHTTPStreamArgs, returns *Z_PluginHTTPStreamReturns) error {
	responseConnection, err := s.muxBroker.Dial(args.ResponseBodyStream)
	if err != nil {
		return encodableError(fmt.Errorf("can't connect to remote response body stream: %w", err))
	}

	// Connect to request body stream
	r := args.Request
	if args.RequestBodyStream != 0 {
		requestConnection, err := s.muxBroker.Dial(args.RequestBodyStream)
		if err != nil {
			return encodableError(fmt.Errorf("can't connect to remote request body stream: %w", err))
		}
		r.Body = connectIOReader(requestConnection)
	} else {
		r.Body = io.NopCloser(&bytes.Buffer{})
	}

	httpReq := r.GetHTTPRequest()

	// Call the PluginHTTP implementation
	if hook, ok := s.impl.(interface {
		PluginHTTP(request *http.Request) *http.Response
	}); ok {
		response := hook.PluginHTTP(httpReq)
		if response != nil {
			returns.StatusCode = response.StatusCode
			returns.Header = response.Header

			// Connect to response body stream and stream the response body.
			// This runs after the RPC reply returns; the caller reads the body
			// from the stream it accepted.
			go func() {
				defer r.Body.Close()
				if response.Body != nil {
					// Stream the response body through the connection
					if _, err := io.Copy(responseConnection, response.Body); err != nil {
						log.Printf("error streaming response body: %s", err.Error())
					}
					response.Body.Close()
				}
				responseConnection.Close()
			}()
		} else {
			r.Body.Close()
		}
	} else {
		r.Body.Close()
		return encodableError(fmt.Errorf("API PluginHTTP called but not implemented"))
	}

	return nil
}

// Server-side handler for old buffered PluginHTTP (for backward compatibility)
func (s *apiRPCServer) PluginHTTP(args *Z_PluginHTTPArgs, returns *Z_PluginHTTPReturns) error {
	args.Request.Body = io.NopCloser(bytes.NewBuffer(args.RequestBody))

	if hook, ok := s.impl.(interface {
		PluginHTTP(request *http.Request) *http.Response
	}); ok {
		response := hook.PluginHTTP(args.Request.GetHTTPRequest())

		// NOTE(review): response is dereferenced without a nil check here;
		// a PluginHTTP implementation returning nil would panic — confirm
		// upstream guarantees before relying on this path.
		responseBody, err := io.ReadAll(response.Body)
		if err != nil {
			return encodableError(fmt.Errorf("RPC call to PluginHTTP API failed: %s", err.Error()))
		}
		response.Body.Close()
		response.Body = nil

		returns.Response = response
		returns.ResponseBody = responseBody
	} else {
		return encodableError(fmt.Errorf("API PluginHTTP called but not implemented"))
	}
	return nil
}

func init() {
	hookNameToId["FileWillBeUploaded"] = FileWillBeUploadedID
}

// Z_FileWillBeUploadedArgs carries the FileWillBeUploaded hook arguments; the
// uploaded file content and the plugin's replacement content are exchanged
// over the two MuxBroker streams rather than in the RPC payload.
type Z_FileWillBeUploadedArgs struct {
	A                     *Context
	B                     *model.FileInfo
	UploadedFileStream    uint32
	ReplacementFileStream uint32
}

type Z_FileWillBeUploadedReturns struct {
	A *model.FileInfo
	B string
}

// FileWillBeUploaded forwards the hook to the plugin, streaming the uploaded
// file to it and copying any replacement content it writes back into output.
// If the hook is unimplemented, the original info is returned unchanged.
func (g *hooksRPCClient) FileWillBeUploaded(c *Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) {
	if !g.implemented[FileWillBeUploadedID] {
		return info, ""
	}

	uploadedFileStreamId := g.muxBroker.NextId()
	go func() {
		uploadedFileConnection, err := g.muxBroker.Accept(uploadedFileStreamId)
		if err != nil {
			g.log.Error("Plugin failed to serve upload file stream. MuxBroker could not Accept connection", mlog.Err(err))
			return
		}
		defer uploadedFileConnection.Close()
		serveIOReader(file, uploadedFileConnection)
	}()

	replacementDone := make(chan bool)
	replacementFileStreamId := g.muxBroker.NextId()
	go func() {
		defer close(replacementDone)

		replacementFileConnection, err := g.muxBroker.Accept(replacementFileStreamId)
		if err != nil {
			g.log.Error("Plugin failed to serve replacement file stream. MuxBroker could not Accept connection", mlog.Err(err))
			return
		}
		defer replacementFileConnection.Close()
		if _, err := io.Copy(output, replacementFileConnection); err != nil {
			g.log.Error("Error reading replacement file.", mlog.Err(err))
		}
	}()

	_args := &Z_FileWillBeUploadedArgs{c, info, uploadedFileStreamId, replacementFileStreamId}
	// Seed the reply with the original FileInfo so it survives an RPC failure.
	_returns := &Z_FileWillBeUploadedReturns{A: _args.B}
	if err := g.client.Call("Plugin.FileWillBeUploaded", _args, _returns); err != nil {
		g.log.Error("RPC call FileWillBeUploaded to plugin failed.", mlog.Err(err))
	}

	// Ensure the io.Copy from the replacementFileConnection above completes.
	<-replacementDone

	return _returns.A, _returns.B
}

// FileWillBeUploaded (plugin side) dials back both file streams and invokes
// the plugin's hook with the uploaded content as a reader and the replacement
// stream as a writer.
func (s *hooksRPCServer) FileWillBeUploaded(args *Z_FileWillBeUploadedArgs, returns *Z_FileWillBeUploadedReturns) error {
	uploadFileConnection, err := s.muxBroker.Dial(args.UploadedFileStream)
	if err != nil {
		fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote upload file stream, error: %v", err.Error())
		return err
	}
	defer uploadFileConnection.Close()
	fileReader := connectIOReader(uploadFileConnection)
	defer fileReader.Close()

	replacementFileConnection, err := s.muxBroker.Dial(args.ReplacementFileStream)
	if err != nil {
		fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote replacement file stream, error: %v", err.Error())
		return err
	}
	defer replacementFileConnection.Close()
	returnFileWriter := replacementFileConnection

	if hook, ok := s.impl.(interface {
		FileWillBeUploaded(c *Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string)
	}); ok {
		returns.A, returns.B = hook.FileWillBeUploaded(args.A, args.B, fileReader, returnFileWriter)
	} else {
		return fmt.Errorf("hook FileWillBeUploaded called but not implemented")
	}
	return nil
}

// MessageWillBePosted is in this file because of the difficulty of identifying which fields need special behaviour.
// The special behaviour needed is decoding the returned post into the original one to avoid the unintentional removal
// of fields by older plugins.
+func init() { + hookNameToId["MessageWillBePosted"] = MessageWillBePostedID +} + +type Z_MessageWillBePostedArgs struct { + A *Context + B *model.Post +} + +type Z_MessageWillBePostedReturns struct { + A *model.Post + B string +} + +func (g *hooksRPCClient) MessageWillBePosted(c *Context, post *model.Post) (*model.Post, string) { + _args := &Z_MessageWillBePostedArgs{c, post} + _returns := &Z_MessageWillBePostedReturns{A: _args.B} + if g.implemented[MessageWillBePostedID] { + if err := g.client.Call("Plugin.MessageWillBePosted", _args, _returns); err != nil { + g.log.Error("RPC call MessageWillBePosted to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) MessageWillBePosted(args *Z_MessageWillBePostedArgs, returns *Z_MessageWillBePostedReturns) error { + if hook, ok := s.impl.(interface { + MessageWillBePosted(c *Context, post *model.Post) (*model.Post, string) + }); ok { + returns.A, returns.B = hook.MessageWillBePosted(args.A, args.B) + } else { + return encodableError(fmt.Errorf("hook MessageWillBePosted called but not implemented")) + } + return nil +} + +// MessageWillBeUpdated is in this file because of the difficulty of identifying which fields need special behaviour. +// The special behaviour needed is decoding the returned post into the original one to avoid the unintentional removal +// of fields by older plugins. 
+func init() { + hookNameToId["MessageWillBeUpdated"] = MessageWillBeUpdatedID +} + +type Z_MessageWillBeUpdatedArgs struct { + A *Context + B *model.Post + C *model.Post +} + +type Z_MessageWillBeUpdatedReturns struct { + A *model.Post + B string +} + +func (g *hooksRPCClient) MessageWillBeUpdated(c *Context, newPost, oldPost *model.Post) (*model.Post, string) { + _args := &Z_MessageWillBeUpdatedArgs{c, newPost, oldPost} + _default_returns := &Z_MessageWillBeUpdatedReturns{A: _args.B} + if g.implemented[MessageWillBeUpdatedID] { + _returns := &Z_MessageWillBeUpdatedReturns{} + if err := g.client.Call("Plugin.MessageWillBeUpdated", _args, _returns); err != nil { + g.log.Error("RPC call MessageWillBeUpdated to plugin failed.", mlog.Err(err)) + return _default_returns.A, _default_returns.B + } + return _returns.A, _returns.B + } + return _default_returns.A, _default_returns.B +} + +func (s *hooksRPCServer) MessageWillBeUpdated(args *Z_MessageWillBeUpdatedArgs, returns *Z_MessageWillBeUpdatedReturns) error { + if hook, ok := s.impl.(interface { + MessageWillBeUpdated(c *Context, newPost, oldPost *model.Post) (*model.Post, string) + }); ok { + returns.A, returns.B = hook.MessageWillBeUpdated(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("hook MessageWillBeUpdated called but not implemented")) + } + return nil +} + +// MessagesWillBeConsumed is in this file because of the difficulty of identifying which fields need special behaviour. +// The special behaviour needed is decoding the returned post into the original one to avoid the unintentional removal +// of fields by older plugins. 
+func init() { + hookNameToId["MessagesWillBeConsumed"] = MessagesWillBeConsumedID +} + +type Z_MessagesWillBeConsumedArgs struct { + A []*model.Post +} + +type Z_MessagesWillBeConsumedReturns struct { + A []*model.Post +} + +func (g *hooksRPCClient) MessagesWillBeConsumed(posts []*model.Post) []*model.Post { + _args := &Z_MessagesWillBeConsumedArgs{posts} + _returns := &Z_MessagesWillBeConsumedReturns{} + if g.implemented[MessagesWillBeConsumedID] { + if err := g.client.Call("Plugin.MessagesWillBeConsumed", _args, _returns); err != nil { + g.log.Error("RPC call MessagesWillBeConsumed to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) MessagesWillBeConsumed(args *Z_MessagesWillBeConsumedArgs, returns *Z_MessagesWillBeConsumedReturns) error { + if hook, ok := s.impl.(interface { + MessagesWillBeConsumed(posts []*model.Post) []*model.Post + }); ok { + returns.A = hook.MessagesWillBeConsumed(args.A) + } else { + return encodableError(fmt.Errorf("hook MessagesWillBeConsumed called but not implemented")) + } + return nil +} + +type Z_LogDebugArgs struct { + A string + B []any +} + +type Z_LogDebugReturns struct{} + +func (g *apiRPCClient) LogDebug(msg string, keyValuePairs ...any) { + stringifiedPairs := stringifyToObjects(keyValuePairs) + _args := &Z_LogDebugArgs{msg, stringifiedPairs} + _returns := &Z_LogDebugReturns{} + if err := g.client.Call("Plugin.LogDebug", _args, _returns); err != nil { + log.Printf("RPC call to LogDebug API failed: %s", err.Error()) + } +} + +func (s *apiRPCServer) LogDebug(args *Z_LogDebugArgs, returns *Z_LogDebugReturns) error { + if hook, ok := s.impl.(interface { + LogDebug(msg string, keyValuePairs ...any) + }); ok { + hook.LogDebug(args.A, args.B...) 
+ } else { + return encodableError(fmt.Errorf("API LogDebug called but not implemented")) + } + return nil +} + +type Z_LogInfoArgs struct { + A string + B []any +} + +type Z_LogInfoReturns struct{} + +func (g *apiRPCClient) LogInfo(msg string, keyValuePairs ...any) { + stringifiedPairs := stringifyToObjects(keyValuePairs) + _args := &Z_LogInfoArgs{msg, stringifiedPairs} + _returns := &Z_LogInfoReturns{} + if err := g.client.Call("Plugin.LogInfo", _args, _returns); err != nil { + log.Printf("RPC call to LogInfo API failed: %s", err.Error()) + } +} + +func (s *apiRPCServer) LogInfo(args *Z_LogInfoArgs, returns *Z_LogInfoReturns) error { + if hook, ok := s.impl.(interface { + LogInfo(msg string, keyValuePairs ...any) + }); ok { + hook.LogInfo(args.A, args.B...) + } else { + return encodableError(fmt.Errorf("API LogInfo called but not implemented")) + } + return nil +} + +type Z_LogWarnArgs struct { + A string + B []any +} + +type Z_LogWarnReturns struct{} + +func (g *apiRPCClient) LogWarn(msg string, keyValuePairs ...any) { + stringifiedPairs := stringifyToObjects(keyValuePairs) + _args := &Z_LogWarnArgs{msg, stringifiedPairs} + _returns := &Z_LogWarnReturns{} + if err := g.client.Call("Plugin.LogWarn", _args, _returns); err != nil { + log.Printf("RPC call to LogWarn API failed: %s", err.Error()) + } +} + +func (s *apiRPCServer) LogWarn(args *Z_LogWarnArgs, returns *Z_LogWarnReturns) error { + if hook, ok := s.impl.(interface { + LogWarn(msg string, keyValuePairs ...any) + }); ok { + hook.LogWarn(args.A, args.B...) 
+ } else { + return encodableError(fmt.Errorf("API LogWarn called but not implemented")) + } + return nil +} + +type Z_LogErrorArgs struct { + A string + B []any +} + +type Z_LogErrorReturns struct{} + +func (g *apiRPCClient) LogError(msg string, keyValuePairs ...any) { + stringifiedPairs := stringifyToObjects(keyValuePairs) + _args := &Z_LogErrorArgs{msg, stringifiedPairs} + _returns := &Z_LogErrorReturns{} + if err := g.client.Call("Plugin.LogError", _args, _returns); err != nil { + log.Printf("RPC call to LogError API failed: %s", err.Error()) + } +} + +func (s *apiRPCServer) LogError(args *Z_LogErrorArgs, returns *Z_LogErrorReturns) error { + if hook, ok := s.impl.(interface { + LogError(msg string, keyValuePairs ...any) + }); ok { + hook.LogError(args.A, args.B...) + } else { + return encodableError(fmt.Errorf("API LogError called but not implemented")) + } + return nil +} + +type Z_LogAuditRecArgs struct { + A *model.AuditRecord +} + +type Z_LogAuditRecReturns struct { +} + +// Custom audit logging methods with gob safety checks +func (g *apiRPCClient) LogAuditRec(rec *model.AuditRecord) { + gobSafeRec := makeAuditRecordGobSafe(*rec) + _args := &Z_LogAuditRecArgs{&gobSafeRec} + _returns := &Z_LogAuditRecReturns{} + if err := g.client.Call("Plugin.LogAuditRec", _args, _returns); err != nil { + log.Printf("RPC call to LogAuditRec API failed: %s", err.Error()) + } +} + +func (s *apiRPCServer) LogAuditRec(args *Z_LogAuditRecArgs, returns *Z_LogAuditRecReturns) error { + if hook, ok := s.impl.(interface { + LogAuditRec(rec *model.AuditRecord) + }); ok { + hook.LogAuditRec(args.A) + } else { + return encodableError(fmt.Errorf("API LogAuditRec called but not implemented")) + } + return nil +} + +type Z_LogAuditRecWithLevelArgs struct { + A *model.AuditRecord + B mlog.Level +} + +type Z_LogAuditRecWithLevelReturns struct { +} + +func (g *apiRPCClient) LogAuditRecWithLevel(rec *model.AuditRecord, level mlog.Level) { + gobSafeRec := makeAuditRecordGobSafe(*rec) + _args 
:= &Z_LogAuditRecWithLevelArgs{&gobSafeRec, level} + _returns := &Z_LogAuditRecWithLevelReturns{} + if err := g.client.Call("Plugin.LogAuditRecWithLevel", _args, _returns); err != nil { + log.Printf("RPC call to LogAuditRecWithLevel API failed: %s", err.Error()) + } +} + +func (s *apiRPCServer) LogAuditRecWithLevel(args *Z_LogAuditRecWithLevelArgs, returns *Z_LogAuditRecWithLevelReturns) error { + if hook, ok := s.impl.(interface { + LogAuditRecWithLevel(rec *model.AuditRecord, level mlog.Level) + }); ok { + hook.LogAuditRecWithLevel(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API LogAuditRecWithLevel called but not implemented")) + } + return nil +} + +type Z_InstallPluginArgs struct { + PluginStreamID uint32 + B bool +} + +type Z_InstallPluginReturns struct { + A *model.Manifest + B *model.AppError +} + +func (g *apiRPCClient) InstallPlugin(file io.Reader, replace bool) (*model.Manifest, *model.AppError) { + pluginStreamID := g.muxBroker.NextId() + + go func() { + uploadPluginConnection, err := g.muxBroker.Accept(pluginStreamID) + if err != nil { + log.Print("Plugin failed to upload plugin. 
MuxBroker could not Accept connection", mlog.Err(err)) + return + } + defer uploadPluginConnection.Close() + serveIOReader(file, uploadPluginConnection) + }() + + _args := &Z_InstallPluginArgs{pluginStreamID, replace} + _returns := &Z_InstallPluginReturns{} + if err := g.client.Call("Plugin.InstallPlugin", _args, _returns); err != nil { + log.Print("RPC call InstallPlugin to plugin failed.", mlog.Err(err)) + } + + return _returns.A, _returns.B +} + +func (s *apiRPCServer) InstallPlugin(args *Z_InstallPluginArgs, returns *Z_InstallPluginReturns) error { + hook, ok := s.impl.(interface { + InstallPlugin(file io.Reader, replace bool) (*model.Manifest, *model.AppError) + }) + if !ok { + return encodableError(fmt.Errorf("API InstallPlugin called but not implemented")) + } + + receivePluginConnection, err := s.muxBroker.Dial(args.PluginStreamID) + if err != nil { + fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote plugin stream, error: %v", err.Error()) + return err + } + pluginReader := connectIOReader(receivePluginConnection) + defer pluginReader.Close() + + returns.A, returns.B = hook.InstallPlugin(pluginReader, args.B) + return nil +} + +type Z_UploadDataArgs struct { + A *model.UploadSession + PluginStreamID uint32 +} + +type Z_UploadDataReturns struct { + A *model.FileInfo + B error +} + +func (g *apiRPCClient) UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, error) { + pluginStreamID := g.muxBroker.NextId() + + go func() { + pluginConnection, err := g.muxBroker.Accept(pluginStreamID) + if err != nil { + log.Print("Failed to upload data. 
MuxBroker could not Accept connection", mlog.Err(err)) + return + } + defer pluginConnection.Close() + serveIOReader(rd, pluginConnection) + }() + + _args := &Z_UploadDataArgs{us, pluginStreamID} + _returns := &Z_UploadDataReturns{} + if err := g.client.Call("Plugin.UploadData", _args, _returns); err != nil { + log.Print("RPC call UploadData to plugin failed.", mlog.Err(err)) + } + + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UploadData(args *Z_UploadDataArgs, returns *Z_UploadDataReturns) error { + hook, ok := s.impl.(interface { + UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, error) + }) + if !ok { + return encodableError(fmt.Errorf("API UploadData called but not implemented")) + } + + receivePluginConnection, err := s.muxBroker.Dial(args.PluginStreamID) + if err != nil { + fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote plugin stream, error: %v", err.Error()) + return err + } + pluginReader := connectIOReader(receivePluginConnection) + defer pluginReader.Close() + + returns.A, returns.B = hook.UploadData(args.A, pluginReader) + return nil +} + +func init() { + hookNameToId["ServeMetrics"] = ServeMetricsID +} + +type Z_ServeMetricsArgs struct { + ResponseWriterStream uint32 + Request *HTTPRequestSubset + Context *Context + RequestBodyStream uint32 +} + +func (g *hooksRPCClient) ServeMetrics(c *Context, w http.ResponseWriter, r *http.Request) { + if !g.implemented[ServeMetricsID] { + http.NotFound(w, r) + return + } + + serveMetricsStreamId := g.muxBroker.NextId() + go func() { + connection, err := g.muxBroker.Accept(serveMetricsStreamId) + if err != nil { + g.log.Error("Plugin failed to ServeMetrics, muxBroker couldn't accept connection", mlog.Uint("serve_http_stream_id", serveMetricsStreamId), mlog.Err(err)) + return + } + defer connection.Close() + + rpcServer := rpc.NewServer() + if err := rpcServer.RegisterName("Plugin", &httpResponseWriterRPCServer{w: w, log: g.log}); err != nil { + g.log.Error("Plugin failed to 
ServeMetrics, couldn't register RPC name", mlog.Err(err)) + return + } + rpcServer.ServeConn(connection) + }() + + requestBodyStreamId := uint32(0) + if r.Body != nil { + requestBodyStreamId = g.muxBroker.NextId() + go func() { + bodyConnection, err := g.muxBroker.Accept(requestBodyStreamId) + if err != nil { + g.log.Error("Plugin failed to ServeMetrics, muxBroker couldn't Accept request body connection", mlog.Err(err)) + return + } + defer bodyConnection.Close() + serveIOReader(r.Body, bodyConnection) + }() + } + + forwardedRequest := &HTTPRequestSubset{ + Method: r.Method, + URL: r.URL, + Proto: r.Proto, + ProtoMajor: r.ProtoMajor, + ProtoMinor: r.ProtoMinor, + Header: r.Header, + Host: r.Host, + RemoteAddr: r.RemoteAddr, + RequestURI: r.RequestURI, + } + + if err := g.client.Call("Plugin.ServeMetrics", Z_ServeMetricsArgs{ + Context: c, + ResponseWriterStream: serveMetricsStreamId, + Request: forwardedRequest, + RequestBodyStream: requestBodyStreamId, + }, nil); err != nil { + g.log.Error("Plugin failed to ServeMetrics, RPC call failed", mlog.Err(err)) + http.Error(w, "500 internal server error", http.StatusInternalServerError) + } +} + +func (s *hooksRPCServer) ServeMetrics(args *Z_ServeMetricsArgs, returns *struct{}) error { + connection, err := s.muxBroker.Dial(args.ResponseWriterStream) + if err != nil { + fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote response writer stream, error: %v", err.Error()) + return err + } + w := connectHTTPResponseWriter(connection) + defer w.Close() + + r := args.Request + if args.RequestBodyStream != 0 { + connection, err := s.muxBroker.Dial(args.RequestBodyStream) + if err != nil { + fmt.Fprintf(os.Stderr, "[ERROR] Can't connect to remote request body stream, error: %v", err.Error()) + return err + } + r.Body = connectIOReader(connection) + } else { + r.Body = io.NopCloser(&bytes.Buffer{}) + } + defer r.Body.Close() + + httpReq := r.GetHTTPRequest() + + if hook, ok := s.impl.(interface { + ServeMetrics(c *Context, w 
http.ResponseWriter, r *http.Request) + }); ok { + hook.ServeMetrics(args.Context, w, httpReq) + } else { + http.NotFound(w, httpReq) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/client_rpc_generated.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/client_rpc_generated.go new file mode 100644 index 00000000..cc6bb63f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/client_rpc_generated.go @@ -0,0 +1,7912 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// Code generated by "make pluginapi" +// DO NOT EDIT + +package plugin + +import ( + "fmt" + "log" + + saml2 "github.com/mattermost/gosaml2" + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +func init() { + hookNameToId["OnDeactivate"] = OnDeactivateID +} + +type Z_OnDeactivateArgs struct { +} + +type Z_OnDeactivateReturns struct { + A error +} + +func (g *hooksRPCClient) OnDeactivate() error { + _args := &Z_OnDeactivateArgs{} + _returns := &Z_OnDeactivateReturns{} + if g.implemented[OnDeactivateID] { + if err := g.client.Call("Plugin.OnDeactivate", _args, _returns); err != nil { + g.log.Error("RPC call OnDeactivate to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) OnDeactivate(args *Z_OnDeactivateArgs, returns *Z_OnDeactivateReturns) error { + if hook, ok := s.impl.(interface { + OnDeactivate() error + }); ok { + returns.A = hook.OnDeactivate() + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("Hook OnDeactivate called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnConfigurationChange"] = OnConfigurationChangeID +} + +type Z_OnConfigurationChangeArgs struct { +} + +type Z_OnConfigurationChangeReturns struct { + A error +} + +func (g *hooksRPCClient) 
OnConfigurationChange() error { + _args := &Z_OnConfigurationChangeArgs{} + _returns := &Z_OnConfigurationChangeReturns{} + if g.implemented[OnConfigurationChangeID] { + if err := g.client.Call("Plugin.OnConfigurationChange", _args, _returns); err != nil { + g.log.Error("RPC call OnConfigurationChange to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) OnConfigurationChange(args *Z_OnConfigurationChangeArgs, returns *Z_OnConfigurationChangeReturns) error { + if hook, ok := s.impl.(interface { + OnConfigurationChange() error + }); ok { + returns.A = hook.OnConfigurationChange() + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("Hook OnConfigurationChange called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["ExecuteCommand"] = ExecuteCommandID +} + +type Z_ExecuteCommandArgs struct { + A *Context + B *model.CommandArgs +} + +type Z_ExecuteCommandReturns struct { + A *model.CommandResponse + B *model.AppError +} + +func (g *hooksRPCClient) ExecuteCommand(c *Context, args *model.CommandArgs) (*model.CommandResponse, *model.AppError) { + _args := &Z_ExecuteCommandArgs{c, args} + _returns := &Z_ExecuteCommandReturns{} + if g.implemented[ExecuteCommandID] { + if err := g.client.Call("Plugin.ExecuteCommand", _args, _returns); err != nil { + g.log.Error("RPC call ExecuteCommand to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) ExecuteCommand(args *Z_ExecuteCommandArgs, returns *Z_ExecuteCommandReturns) error { + if hook, ok := s.impl.(interface { + ExecuteCommand(c *Context, args *model.CommandArgs) (*model.CommandResponse, *model.AppError) + }); ok { + returns.A, returns.B = hook.ExecuteCommand(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook ExecuteCommand called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserHasBeenCreated"] = UserHasBeenCreatedID +} + +type 
Z_UserHasBeenCreatedArgs struct { + A *Context + B *model.User +} + +type Z_UserHasBeenCreatedReturns struct { +} + +func (g *hooksRPCClient) UserHasBeenCreated(c *Context, user *model.User) { + _args := &Z_UserHasBeenCreatedArgs{c, user} + _returns := &Z_UserHasBeenCreatedReturns{} + if g.implemented[UserHasBeenCreatedID] { + if err := g.client.Call("Plugin.UserHasBeenCreated", _args, _returns); err != nil { + g.log.Error("RPC call UserHasBeenCreated to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) UserHasBeenCreated(args *Z_UserHasBeenCreatedArgs, returns *Z_UserHasBeenCreatedReturns) error { + if hook, ok := s.impl.(interface { + UserHasBeenCreated(c *Context, user *model.User) + }); ok { + hook.UserHasBeenCreated(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook UserHasBeenCreated called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserWillLogIn"] = UserWillLogInID +} + +type Z_UserWillLogInArgs struct { + A *Context + B *model.User +} + +type Z_UserWillLogInReturns struct { + A string +} + +func (g *hooksRPCClient) UserWillLogIn(c *Context, user *model.User) string { + _args := &Z_UserWillLogInArgs{c, user} + _returns := &Z_UserWillLogInReturns{} + if g.implemented[UserWillLogInID] { + if err := g.client.Call("Plugin.UserWillLogIn", _args, _returns); err != nil { + g.log.Error("RPC call UserWillLogIn to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) UserWillLogIn(args *Z_UserWillLogInArgs, returns *Z_UserWillLogInReturns) error { + if hook, ok := s.impl.(interface { + UserWillLogIn(c *Context, user *model.User) string + }); ok { + returns.A = hook.UserWillLogIn(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook UserWillLogIn called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserHasLoggedIn"] = UserHasLoggedInID +} + +type Z_UserHasLoggedInArgs struct { + A *Context + B *model.User +} + +type 
Z_UserHasLoggedInReturns struct { +} + +func (g *hooksRPCClient) UserHasLoggedIn(c *Context, user *model.User) { + _args := &Z_UserHasLoggedInArgs{c, user} + _returns := &Z_UserHasLoggedInReturns{} + if g.implemented[UserHasLoggedInID] { + if err := g.client.Call("Plugin.UserHasLoggedIn", _args, _returns); err != nil { + g.log.Error("RPC call UserHasLoggedIn to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) UserHasLoggedIn(args *Z_UserHasLoggedInArgs, returns *Z_UserHasLoggedInReturns) error { + if hook, ok := s.impl.(interface { + UserHasLoggedIn(c *Context, user *model.User) + }); ok { + hook.UserHasLoggedIn(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook UserHasLoggedIn called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["MessageHasBeenPosted"] = MessageHasBeenPostedID +} + +type Z_MessageHasBeenPostedArgs struct { + A *Context + B *model.Post +} + +type Z_MessageHasBeenPostedReturns struct { +} + +func (g *hooksRPCClient) MessageHasBeenPosted(c *Context, post *model.Post) { + _args := &Z_MessageHasBeenPostedArgs{c, post} + _returns := &Z_MessageHasBeenPostedReturns{} + if g.implemented[MessageHasBeenPostedID] { + if err := g.client.Call("Plugin.MessageHasBeenPosted", _args, _returns); err != nil { + g.log.Error("RPC call MessageHasBeenPosted to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) MessageHasBeenPosted(args *Z_MessageHasBeenPostedArgs, returns *Z_MessageHasBeenPostedReturns) error { + if hook, ok := s.impl.(interface { + MessageHasBeenPosted(c *Context, post *model.Post) + }); ok { + hook.MessageHasBeenPosted(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook MessageHasBeenPosted called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["MessageHasBeenUpdated"] = MessageHasBeenUpdatedID +} + +type Z_MessageHasBeenUpdatedArgs struct { + A *Context + B *model.Post + C *model.Post +} + +type 
Z_MessageHasBeenUpdatedReturns struct { +} + +func (g *hooksRPCClient) MessageHasBeenUpdated(c *Context, newPost, oldPost *model.Post) { + _args := &Z_MessageHasBeenUpdatedArgs{c, newPost, oldPost} + _returns := &Z_MessageHasBeenUpdatedReturns{} + if g.implemented[MessageHasBeenUpdatedID] { + if err := g.client.Call("Plugin.MessageHasBeenUpdated", _args, _returns); err != nil { + g.log.Error("RPC call MessageHasBeenUpdated to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) MessageHasBeenUpdated(args *Z_MessageHasBeenUpdatedArgs, returns *Z_MessageHasBeenUpdatedReturns) error { + if hook, ok := s.impl.(interface { + MessageHasBeenUpdated(c *Context, newPost, oldPost *model.Post) + }); ok { + hook.MessageHasBeenUpdated(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("Hook MessageHasBeenUpdated called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["MessageHasBeenDeleted"] = MessageHasBeenDeletedID +} + +type Z_MessageHasBeenDeletedArgs struct { + A *Context + B *model.Post +} + +type Z_MessageHasBeenDeletedReturns struct { +} + +func (g *hooksRPCClient) MessageHasBeenDeleted(c *Context, post *model.Post) { + _args := &Z_MessageHasBeenDeletedArgs{c, post} + _returns := &Z_MessageHasBeenDeletedReturns{} + if g.implemented[MessageHasBeenDeletedID] { + if err := g.client.Call("Plugin.MessageHasBeenDeleted", _args, _returns); err != nil { + g.log.Error("RPC call MessageHasBeenDeleted to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) MessageHasBeenDeleted(args *Z_MessageHasBeenDeletedArgs, returns *Z_MessageHasBeenDeletedReturns) error { + if hook, ok := s.impl.(interface { + MessageHasBeenDeleted(c *Context, post *model.Post) + }); ok { + hook.MessageHasBeenDeleted(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook MessageHasBeenDeleted called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["ChannelHasBeenCreated"] = 
ChannelHasBeenCreatedID +} + +type Z_ChannelHasBeenCreatedArgs struct { + A *Context + B *model.Channel +} + +type Z_ChannelHasBeenCreatedReturns struct { +} + +func (g *hooksRPCClient) ChannelHasBeenCreated(c *Context, channel *model.Channel) { + _args := &Z_ChannelHasBeenCreatedArgs{c, channel} + _returns := &Z_ChannelHasBeenCreatedReturns{} + if g.implemented[ChannelHasBeenCreatedID] { + if err := g.client.Call("Plugin.ChannelHasBeenCreated", _args, _returns); err != nil { + g.log.Error("RPC call ChannelHasBeenCreated to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) ChannelHasBeenCreated(args *Z_ChannelHasBeenCreatedArgs, returns *Z_ChannelHasBeenCreatedReturns) error { + if hook, ok := s.impl.(interface { + ChannelHasBeenCreated(c *Context, channel *model.Channel) + }); ok { + hook.ChannelHasBeenCreated(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook ChannelHasBeenCreated called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserHasJoinedChannel"] = UserHasJoinedChannelID +} + +type Z_UserHasJoinedChannelArgs struct { + A *Context + B *model.ChannelMember + C *model.User +} + +type Z_UserHasJoinedChannelReturns struct { +} + +func (g *hooksRPCClient) UserHasJoinedChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) { + _args := &Z_UserHasJoinedChannelArgs{c, channelMember, actor} + _returns := &Z_UserHasJoinedChannelReturns{} + if g.implemented[UserHasJoinedChannelID] { + if err := g.client.Call("Plugin.UserHasJoinedChannel", _args, _returns); err != nil { + g.log.Error("RPC call UserHasJoinedChannel to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) UserHasJoinedChannel(args *Z_UserHasJoinedChannelArgs, returns *Z_UserHasJoinedChannelReturns) error { + if hook, ok := s.impl.(interface { + UserHasJoinedChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) + }); ok { + hook.UserHasJoinedChannel(args.A, args.B, args.C) + 
} else { + return encodableError(fmt.Errorf("Hook UserHasJoinedChannel called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserHasLeftChannel"] = UserHasLeftChannelID +} + +type Z_UserHasLeftChannelArgs struct { + A *Context + B *model.ChannelMember + C *model.User +} + +type Z_UserHasLeftChannelReturns struct { +} + +func (g *hooksRPCClient) UserHasLeftChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) { + _args := &Z_UserHasLeftChannelArgs{c, channelMember, actor} + _returns := &Z_UserHasLeftChannelReturns{} + if g.implemented[UserHasLeftChannelID] { + if err := g.client.Call("Plugin.UserHasLeftChannel", _args, _returns); err != nil { + g.log.Error("RPC call UserHasLeftChannel to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) UserHasLeftChannel(args *Z_UserHasLeftChannelArgs, returns *Z_UserHasLeftChannelReturns) error { + if hook, ok := s.impl.(interface { + UserHasLeftChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) + }); ok { + hook.UserHasLeftChannel(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("Hook UserHasLeftChannel called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserHasJoinedTeam"] = UserHasJoinedTeamID +} + +type Z_UserHasJoinedTeamArgs struct { + A *Context + B *model.TeamMember + C *model.User +} + +type Z_UserHasJoinedTeamReturns struct { +} + +func (g *hooksRPCClient) UserHasJoinedTeam(c *Context, teamMember *model.TeamMember, actor *model.User) { + _args := &Z_UserHasJoinedTeamArgs{c, teamMember, actor} + _returns := &Z_UserHasJoinedTeamReturns{} + if g.implemented[UserHasJoinedTeamID] { + if err := g.client.Call("Plugin.UserHasJoinedTeam", _args, _returns); err != nil { + g.log.Error("RPC call UserHasJoinedTeam to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) UserHasJoinedTeam(args *Z_UserHasJoinedTeamArgs, returns *Z_UserHasJoinedTeamReturns) error { + if 
hook, ok := s.impl.(interface { + UserHasJoinedTeam(c *Context, teamMember *model.TeamMember, actor *model.User) + }); ok { + hook.UserHasJoinedTeam(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("Hook UserHasJoinedTeam called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserHasLeftTeam"] = UserHasLeftTeamID +} + +type Z_UserHasLeftTeamArgs struct { + A *Context + B *model.TeamMember + C *model.User +} + +type Z_UserHasLeftTeamReturns struct { +} + +func (g *hooksRPCClient) UserHasLeftTeam(c *Context, teamMember *model.TeamMember, actor *model.User) { + _args := &Z_UserHasLeftTeamArgs{c, teamMember, actor} + _returns := &Z_UserHasLeftTeamReturns{} + if g.implemented[UserHasLeftTeamID] { + if err := g.client.Call("Plugin.UserHasLeftTeam", _args, _returns); err != nil { + g.log.Error("RPC call UserHasLeftTeam to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) UserHasLeftTeam(args *Z_UserHasLeftTeamArgs, returns *Z_UserHasLeftTeamReturns) error { + if hook, ok := s.impl.(interface { + UserHasLeftTeam(c *Context, teamMember *model.TeamMember, actor *model.User) + }); ok { + hook.UserHasLeftTeam(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("Hook UserHasLeftTeam called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["ReactionHasBeenAdded"] = ReactionHasBeenAddedID +} + +type Z_ReactionHasBeenAddedArgs struct { + A *Context + B *model.Reaction +} + +type Z_ReactionHasBeenAddedReturns struct { +} + +func (g *hooksRPCClient) ReactionHasBeenAdded(c *Context, reaction *model.Reaction) { + _args := &Z_ReactionHasBeenAddedArgs{c, reaction} + _returns := &Z_ReactionHasBeenAddedReturns{} + if g.implemented[ReactionHasBeenAddedID] { + if err := g.client.Call("Plugin.ReactionHasBeenAdded", _args, _returns); err != nil { + g.log.Error("RPC call ReactionHasBeenAdded to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) 
ReactionHasBeenAdded(args *Z_ReactionHasBeenAddedArgs, returns *Z_ReactionHasBeenAddedReturns) error { + if hook, ok := s.impl.(interface { + ReactionHasBeenAdded(c *Context, reaction *model.Reaction) + }); ok { + hook.ReactionHasBeenAdded(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook ReactionHasBeenAdded called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["ReactionHasBeenRemoved"] = ReactionHasBeenRemovedID +} + +type Z_ReactionHasBeenRemovedArgs struct { + A *Context + B *model.Reaction +} + +type Z_ReactionHasBeenRemovedReturns struct { +} + +func (g *hooksRPCClient) ReactionHasBeenRemoved(c *Context, reaction *model.Reaction) { + _args := &Z_ReactionHasBeenRemovedArgs{c, reaction} + _returns := &Z_ReactionHasBeenRemovedReturns{} + if g.implemented[ReactionHasBeenRemovedID] { + if err := g.client.Call("Plugin.ReactionHasBeenRemoved", _args, _returns); err != nil { + g.log.Error("RPC call ReactionHasBeenRemoved to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) ReactionHasBeenRemoved(args *Z_ReactionHasBeenRemovedArgs, returns *Z_ReactionHasBeenRemovedReturns) error { + if hook, ok := s.impl.(interface { + ReactionHasBeenRemoved(c *Context, reaction *model.Reaction) + }); ok { + hook.ReactionHasBeenRemoved(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook ReactionHasBeenRemoved called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnPluginClusterEvent"] = OnPluginClusterEventID +} + +type Z_OnPluginClusterEventArgs struct { + A *Context + B model.PluginClusterEvent +} + +type Z_OnPluginClusterEventReturns struct { +} + +func (g *hooksRPCClient) OnPluginClusterEvent(c *Context, ev model.PluginClusterEvent) { + _args := &Z_OnPluginClusterEventArgs{c, ev} + _returns := &Z_OnPluginClusterEventReturns{} + if g.implemented[OnPluginClusterEventID] { + if err := g.client.Call("Plugin.OnPluginClusterEvent", _args, _returns); err != nil { + 
g.log.Error("RPC call OnPluginClusterEvent to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) OnPluginClusterEvent(args *Z_OnPluginClusterEventArgs, returns *Z_OnPluginClusterEventReturns) error { + if hook, ok := s.impl.(interface { + OnPluginClusterEvent(c *Context, ev model.PluginClusterEvent) + }); ok { + hook.OnPluginClusterEvent(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook OnPluginClusterEvent called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnWebSocketConnect"] = OnWebSocketConnectID +} + +type Z_OnWebSocketConnectArgs struct { + A string + B string +} + +type Z_OnWebSocketConnectReturns struct { +} + +func (g *hooksRPCClient) OnWebSocketConnect(webConnID, userID string) { + _args := &Z_OnWebSocketConnectArgs{webConnID, userID} + _returns := &Z_OnWebSocketConnectReturns{} + if g.implemented[OnWebSocketConnectID] { + if err := g.client.Call("Plugin.OnWebSocketConnect", _args, _returns); err != nil { + g.log.Error("RPC call OnWebSocketConnect to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) OnWebSocketConnect(args *Z_OnWebSocketConnectArgs, returns *Z_OnWebSocketConnectReturns) error { + if hook, ok := s.impl.(interface { + OnWebSocketConnect(webConnID, userID string) + }); ok { + hook.OnWebSocketConnect(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook OnWebSocketConnect called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnWebSocketDisconnect"] = OnWebSocketDisconnectID +} + +type Z_OnWebSocketDisconnectArgs struct { + A string + B string +} + +type Z_OnWebSocketDisconnectReturns struct { +} + +func (g *hooksRPCClient) OnWebSocketDisconnect(webConnID, userID string) { + _args := &Z_OnWebSocketDisconnectArgs{webConnID, userID} + _returns := &Z_OnWebSocketDisconnectReturns{} + if g.implemented[OnWebSocketDisconnectID] { + if err := g.client.Call("Plugin.OnWebSocketDisconnect", _args, _returns); err != 
nil { + g.log.Error("RPC call OnWebSocketDisconnect to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) OnWebSocketDisconnect(args *Z_OnWebSocketDisconnectArgs, returns *Z_OnWebSocketDisconnectReturns) error { + if hook, ok := s.impl.(interface { + OnWebSocketDisconnect(webConnID, userID string) + }); ok { + hook.OnWebSocketDisconnect(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook OnWebSocketDisconnect called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["WebSocketMessageHasBeenPosted"] = WebSocketMessageHasBeenPostedID +} + +type Z_WebSocketMessageHasBeenPostedArgs struct { + A string + B string + C *model.WebSocketRequest +} + +type Z_WebSocketMessageHasBeenPostedReturns struct { +} + +func (g *hooksRPCClient) WebSocketMessageHasBeenPosted(webConnID, userID string, req *model.WebSocketRequest) { + _args := &Z_WebSocketMessageHasBeenPostedArgs{webConnID, userID, req} + _returns := &Z_WebSocketMessageHasBeenPostedReturns{} + if g.implemented[WebSocketMessageHasBeenPostedID] { + if err := g.client.Call("Plugin.WebSocketMessageHasBeenPosted", _args, _returns); err != nil { + g.log.Error("RPC call WebSocketMessageHasBeenPosted to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) WebSocketMessageHasBeenPosted(args *Z_WebSocketMessageHasBeenPostedArgs, returns *Z_WebSocketMessageHasBeenPostedReturns) error { + if hook, ok := s.impl.(interface { + WebSocketMessageHasBeenPosted(webConnID, userID string, req *model.WebSocketRequest) + }); ok { + hook.WebSocketMessageHasBeenPosted(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("Hook WebSocketMessageHasBeenPosted called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["RunDataRetention"] = RunDataRetentionID +} + +type Z_RunDataRetentionArgs struct { + A int64 + B int64 +} + +type Z_RunDataRetentionReturns struct { + A int64 + B error +} + +func (g *hooksRPCClient) 
RunDataRetention(nowTime, batchSize int64) (int64, error) { + _args := &Z_RunDataRetentionArgs{nowTime, batchSize} + _returns := &Z_RunDataRetentionReturns{} + if g.implemented[RunDataRetentionID] { + if err := g.client.Call("Plugin.RunDataRetention", _args, _returns); err != nil { + g.log.Error("RPC call RunDataRetention to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) RunDataRetention(args *Z_RunDataRetentionArgs, returns *Z_RunDataRetentionReturns) error { + if hook, ok := s.impl.(interface { + RunDataRetention(nowTime, batchSize int64) (int64, error) + }); ok { + returns.A, returns.B = hook.RunDataRetention(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("Hook RunDataRetention called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnInstall"] = OnInstallID +} + +type Z_OnInstallArgs struct { + A *Context + B model.OnInstallEvent +} + +type Z_OnInstallReturns struct { + A error +} + +func (g *hooksRPCClient) OnInstall(c *Context, event model.OnInstallEvent) error { + _args := &Z_OnInstallArgs{c, event} + _returns := &Z_OnInstallReturns{} + if g.implemented[OnInstallID] { + if err := g.client.Call("Plugin.OnInstall", _args, _returns); err != nil { + g.log.Error("RPC call OnInstall to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) OnInstall(args *Z_OnInstallArgs, returns *Z_OnInstallReturns) error { + if hook, ok := s.impl.(interface { + OnInstall(c *Context, event model.OnInstallEvent) error + }); ok { + returns.A = hook.OnInstall(args.A, args.B) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("Hook OnInstall called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnSendDailyTelemetry"] = OnSendDailyTelemetryID +} + +type Z_OnSendDailyTelemetryArgs struct { +} + +type Z_OnSendDailyTelemetryReturns struct { +} + +func 
(g *hooksRPCClient) OnSendDailyTelemetry() { + _args := &Z_OnSendDailyTelemetryArgs{} + _returns := &Z_OnSendDailyTelemetryReturns{} + if g.implemented[OnSendDailyTelemetryID] { + if err := g.client.Call("Plugin.OnSendDailyTelemetry", _args, _returns); err != nil { + g.log.Error("RPC call OnSendDailyTelemetry to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) OnSendDailyTelemetry(args *Z_OnSendDailyTelemetryArgs, returns *Z_OnSendDailyTelemetryReturns) error { + if hook, ok := s.impl.(interface { + OnSendDailyTelemetry() + }); ok { + hook.OnSendDailyTelemetry() + } else { + return encodableError(fmt.Errorf("Hook OnSendDailyTelemetry called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnCloudLimitsUpdated"] = OnCloudLimitsUpdatedID +} + +type Z_OnCloudLimitsUpdatedArgs struct { + A *model.ProductLimits +} + +type Z_OnCloudLimitsUpdatedReturns struct { +} + +func (g *hooksRPCClient) OnCloudLimitsUpdated(limits *model.ProductLimits) { + _args := &Z_OnCloudLimitsUpdatedArgs{limits} + _returns := &Z_OnCloudLimitsUpdatedReturns{} + if g.implemented[OnCloudLimitsUpdatedID] { + if err := g.client.Call("Plugin.OnCloudLimitsUpdated", _args, _returns); err != nil { + g.log.Error("RPC call OnCloudLimitsUpdated to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) OnCloudLimitsUpdated(args *Z_OnCloudLimitsUpdatedArgs, returns *Z_OnCloudLimitsUpdatedReturns) error { + if hook, ok := s.impl.(interface { + OnCloudLimitsUpdated(limits *model.ProductLimits) + }); ok { + hook.OnCloudLimitsUpdated(args.A) + } else { + return encodableError(fmt.Errorf("Hook OnCloudLimitsUpdated called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["ConfigurationWillBeSaved"] = ConfigurationWillBeSavedID +} + +type Z_ConfigurationWillBeSavedArgs struct { + A *model.Config +} + +type Z_ConfigurationWillBeSavedReturns struct { + A *model.Config + B error +} + +func (g *hooksRPCClient) 
ConfigurationWillBeSaved(newCfg *model.Config) (*model.Config, error) { + _args := &Z_ConfigurationWillBeSavedArgs{newCfg} + _returns := &Z_ConfigurationWillBeSavedReturns{} + if g.implemented[ConfigurationWillBeSavedID] { + if err := g.client.Call("Plugin.ConfigurationWillBeSaved", _args, _returns); err != nil { + g.log.Error("RPC call ConfigurationWillBeSaved to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) ConfigurationWillBeSaved(args *Z_ConfigurationWillBeSavedArgs, returns *Z_ConfigurationWillBeSavedReturns) error { + if hook, ok := s.impl.(interface { + ConfigurationWillBeSaved(newCfg *model.Config) (*model.Config, error) + }); ok { + returns.A, returns.B = hook.ConfigurationWillBeSaved(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("Hook ConfigurationWillBeSaved called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["EmailNotificationWillBeSent"] = EmailNotificationWillBeSentID +} + +type Z_EmailNotificationWillBeSentArgs struct { + A *model.EmailNotification +} + +type Z_EmailNotificationWillBeSentReturns struct { + A *model.EmailNotificationContent + B string +} + +func (g *hooksRPCClient) EmailNotificationWillBeSent(emailNotification *model.EmailNotification) (*model.EmailNotificationContent, string) { + _args := &Z_EmailNotificationWillBeSentArgs{emailNotification} + _returns := &Z_EmailNotificationWillBeSentReturns{} + if g.implemented[EmailNotificationWillBeSentID] { + if err := g.client.Call("Plugin.EmailNotificationWillBeSent", _args, _returns); err != nil { + g.log.Error("RPC call EmailNotificationWillBeSent to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) EmailNotificationWillBeSent(args *Z_EmailNotificationWillBeSentArgs, returns *Z_EmailNotificationWillBeSentReturns) error { + if hook, ok := s.impl.(interface { + EmailNotificationWillBeSent(emailNotification 
*model.EmailNotification) (*model.EmailNotificationContent, string) + }); ok { + returns.A, returns.B = hook.EmailNotificationWillBeSent(args.A) + } else { + return encodableError(fmt.Errorf("Hook EmailNotificationWillBeSent called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["NotificationWillBePushed"] = NotificationWillBePushedID +} + +type Z_NotificationWillBePushedArgs struct { + A *model.PushNotification + B string +} + +type Z_NotificationWillBePushedReturns struct { + A *model.PushNotification + B string +} + +func (g *hooksRPCClient) NotificationWillBePushed(pushNotification *model.PushNotification, userID string) (*model.PushNotification, string) { + _args := &Z_NotificationWillBePushedArgs{pushNotification, userID} + _returns := &Z_NotificationWillBePushedReturns{} + if g.implemented[NotificationWillBePushedID] { + if err := g.client.Call("Plugin.NotificationWillBePushed", _args, _returns); err != nil { + g.log.Error("RPC call NotificationWillBePushed to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) NotificationWillBePushed(args *Z_NotificationWillBePushedArgs, returns *Z_NotificationWillBePushedReturns) error { + if hook, ok := s.impl.(interface { + NotificationWillBePushed(pushNotification *model.PushNotification, userID string) (*model.PushNotification, string) + }); ok { + returns.A, returns.B = hook.NotificationWillBePushed(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook NotificationWillBePushed called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["UserHasBeenDeactivated"] = UserHasBeenDeactivatedID +} + +type Z_UserHasBeenDeactivatedArgs struct { + A *Context + B *model.User +} + +type Z_UserHasBeenDeactivatedReturns struct { +} + +func (g *hooksRPCClient) UserHasBeenDeactivated(c *Context, user *model.User) { + _args := &Z_UserHasBeenDeactivatedArgs{c, user} + _returns := &Z_UserHasBeenDeactivatedReturns{} + if 
g.implemented[UserHasBeenDeactivatedID] { + if err := g.client.Call("Plugin.UserHasBeenDeactivated", _args, _returns); err != nil { + g.log.Error("RPC call UserHasBeenDeactivated to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) UserHasBeenDeactivated(args *Z_UserHasBeenDeactivatedArgs, returns *Z_UserHasBeenDeactivatedReturns) error { + if hook, ok := s.impl.(interface { + UserHasBeenDeactivated(c *Context, user *model.User) + }); ok { + hook.UserHasBeenDeactivated(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook UserHasBeenDeactivated called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnSharedChannelsSyncMsg"] = OnSharedChannelsSyncMsgID +} + +type Z_OnSharedChannelsSyncMsgArgs struct { + A *model.SyncMsg + B *model.RemoteCluster +} + +type Z_OnSharedChannelsSyncMsgReturns struct { + A model.SyncResponse + B error +} + +func (g *hooksRPCClient) OnSharedChannelsSyncMsg(msg *model.SyncMsg, rc *model.RemoteCluster) (model.SyncResponse, error) { + _args := &Z_OnSharedChannelsSyncMsgArgs{msg, rc} + _returns := &Z_OnSharedChannelsSyncMsgReturns{} + if g.implemented[OnSharedChannelsSyncMsgID] { + if err := g.client.Call("Plugin.OnSharedChannelsSyncMsg", _args, _returns); err != nil { + g.log.Error("RPC call OnSharedChannelsSyncMsg to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) OnSharedChannelsSyncMsg(args *Z_OnSharedChannelsSyncMsgArgs, returns *Z_OnSharedChannelsSyncMsgReturns) error { + if hook, ok := s.impl.(interface { + OnSharedChannelsSyncMsg(msg *model.SyncMsg, rc *model.RemoteCluster) (model.SyncResponse, error) + }); ok { + returns.A, returns.B = hook.OnSharedChannelsSyncMsg(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("Hook OnSharedChannelsSyncMsg called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnSharedChannelsPing"] = 
OnSharedChannelsPingID +} + +type Z_OnSharedChannelsPingArgs struct { + A *model.RemoteCluster +} + +type Z_OnSharedChannelsPingReturns struct { + A bool +} + +func (g *hooksRPCClient) OnSharedChannelsPing(rc *model.RemoteCluster) bool { + _args := &Z_OnSharedChannelsPingArgs{rc} + _returns := &Z_OnSharedChannelsPingReturns{} + if g.implemented[OnSharedChannelsPingID] { + if err := g.client.Call("Plugin.OnSharedChannelsPing", _args, _returns); err != nil { + g.log.Error("RPC call OnSharedChannelsPing to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) OnSharedChannelsPing(args *Z_OnSharedChannelsPingArgs, returns *Z_OnSharedChannelsPingReturns) error { + if hook, ok := s.impl.(interface { + OnSharedChannelsPing(rc *model.RemoteCluster) bool + }); ok { + returns.A = hook.OnSharedChannelsPing(args.A) + } else { + return encodableError(fmt.Errorf("Hook OnSharedChannelsPing called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["PreferencesHaveChanged"] = PreferencesHaveChangedID +} + +type Z_PreferencesHaveChangedArgs struct { + A *Context + B []model.Preference +} + +type Z_PreferencesHaveChangedReturns struct { +} + +func (g *hooksRPCClient) PreferencesHaveChanged(c *Context, preferences []model.Preference) { + _args := &Z_PreferencesHaveChangedArgs{c, preferences} + _returns := &Z_PreferencesHaveChangedReturns{} + if g.implemented[PreferencesHaveChangedID] { + if err := g.client.Call("Plugin.PreferencesHaveChanged", _args, _returns); err != nil { + g.log.Error("RPC call PreferencesHaveChanged to plugin failed.", mlog.Err(err)) + } + } + +} + +func (s *hooksRPCServer) PreferencesHaveChanged(args *Z_PreferencesHaveChangedArgs, returns *Z_PreferencesHaveChangedReturns) error { + if hook, ok := s.impl.(interface { + PreferencesHaveChanged(c *Context, preferences []model.Preference) + }); ok { + hook.PreferencesHaveChanged(args.A, args.B) + } else { + return encodableError(fmt.Errorf("Hook 
PreferencesHaveChanged called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnSharedChannelsAttachmentSyncMsg"] = OnSharedChannelsAttachmentSyncMsgID +} + +type Z_OnSharedChannelsAttachmentSyncMsgArgs struct { + A *model.FileInfo + B *model.Post + C *model.RemoteCluster +} + +type Z_OnSharedChannelsAttachmentSyncMsgReturns struct { + A error +} + +func (g *hooksRPCClient) OnSharedChannelsAttachmentSyncMsg(fi *model.FileInfo, post *model.Post, rc *model.RemoteCluster) error { + _args := &Z_OnSharedChannelsAttachmentSyncMsgArgs{fi, post, rc} + _returns := &Z_OnSharedChannelsAttachmentSyncMsgReturns{} + if g.implemented[OnSharedChannelsAttachmentSyncMsgID] { + if err := g.client.Call("Plugin.OnSharedChannelsAttachmentSyncMsg", _args, _returns); err != nil { + g.log.Error("RPC call OnSharedChannelsAttachmentSyncMsg to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) OnSharedChannelsAttachmentSyncMsg(args *Z_OnSharedChannelsAttachmentSyncMsgArgs, returns *Z_OnSharedChannelsAttachmentSyncMsgReturns) error { + if hook, ok := s.impl.(interface { + OnSharedChannelsAttachmentSyncMsg(fi *model.FileInfo, post *model.Post, rc *model.RemoteCluster) error + }); ok { + returns.A = hook.OnSharedChannelsAttachmentSyncMsg(args.A, args.B, args.C) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("Hook OnSharedChannelsAttachmentSyncMsg called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["OnSharedChannelsProfileImageSyncMsg"] = OnSharedChannelsProfileImageSyncMsgID +} + +type Z_OnSharedChannelsProfileImageSyncMsgArgs struct { + A *model.User + B *model.RemoteCluster +} + +type Z_OnSharedChannelsProfileImageSyncMsgReturns struct { + A error +} + +func (g *hooksRPCClient) OnSharedChannelsProfileImageSyncMsg(user *model.User, rc *model.RemoteCluster) error { + _args := &Z_OnSharedChannelsProfileImageSyncMsgArgs{user, rc} + _returns := 
&Z_OnSharedChannelsProfileImageSyncMsgReturns{} + if g.implemented[OnSharedChannelsProfileImageSyncMsgID] { + if err := g.client.Call("Plugin.OnSharedChannelsProfileImageSyncMsg", _args, _returns); err != nil { + g.log.Error("RPC call OnSharedChannelsProfileImageSyncMsg to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) OnSharedChannelsProfileImageSyncMsg(args *Z_OnSharedChannelsProfileImageSyncMsgArgs, returns *Z_OnSharedChannelsProfileImageSyncMsgReturns) error { + if hook, ok := s.impl.(interface { + OnSharedChannelsProfileImageSyncMsg(user *model.User, rc *model.RemoteCluster) error + }); ok { + returns.A = hook.OnSharedChannelsProfileImageSyncMsg(args.A, args.B) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("Hook OnSharedChannelsProfileImageSyncMsg called but not implemented.")) + } + return nil +} + +func init() { + hookNameToId["GenerateSupportData"] = GenerateSupportDataID +} + +type Z_GenerateSupportDataArgs struct { + A *Context +} + +type Z_GenerateSupportDataReturns struct { + A []*model.FileData + B error +} + +func (g *hooksRPCClient) GenerateSupportData(c *Context) ([]*model.FileData, error) { + _args := &Z_GenerateSupportDataArgs{c} + _returns := &Z_GenerateSupportDataReturns{} + if g.implemented[GenerateSupportDataID] { + if err := g.client.Call("Plugin.GenerateSupportData", _args, _returns); err != nil { + g.log.Error("RPC call GenerateSupportData to plugin failed.", mlog.Err(err)) + } + } + return _returns.A, _returns.B +} + +func (s *hooksRPCServer) GenerateSupportData(args *Z_GenerateSupportDataArgs, returns *Z_GenerateSupportDataReturns) error { + if hook, ok := s.impl.(interface { + GenerateSupportData(c *Context) ([]*model.FileData, error) + }); ok { + returns.A, returns.B = hook.GenerateSupportData(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("Hook GenerateSupportData called but not implemented.")) + } + return 
nil +} + +func init() { + hookNameToId["OnSAMLLogin"] = OnSAMLLoginID +} + +type Z_OnSAMLLoginArgs struct { + A *Context + B *model.User + C *saml2.AssertionInfo +} + +type Z_OnSAMLLoginReturns struct { + A error +} + +func (g *hooksRPCClient) OnSAMLLogin(c *Context, user *model.User, assertion *saml2.AssertionInfo) error { + _args := &Z_OnSAMLLoginArgs{c, user, assertion} + _returns := &Z_OnSAMLLoginReturns{} + if g.implemented[OnSAMLLoginID] { + if err := g.client.Call("Plugin.OnSAMLLogin", _args, _returns); err != nil { + g.log.Error("RPC call OnSAMLLogin to plugin failed.", mlog.Err(err)) + } + } + return _returns.A +} + +func (s *hooksRPCServer) OnSAMLLogin(args *Z_OnSAMLLoginArgs, returns *Z_OnSAMLLoginReturns) error { + if hook, ok := s.impl.(interface { + OnSAMLLogin(c *Context, user *model.User, assertion *saml2.AssertionInfo) error + }); ok { + returns.A = hook.OnSAMLLogin(args.A, args.B, args.C) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("Hook OnSAMLLogin called but not implemented.")) + } + return nil +} + +type Z_RegisterCommandArgs struct { + A *model.Command +} + +type Z_RegisterCommandReturns struct { + A error +} + +func (g *apiRPCClient) RegisterCommand(command *model.Command) error { + _args := &Z_RegisterCommandArgs{command} + _returns := &Z_RegisterCommandReturns{} + if err := g.client.Call("Plugin.RegisterCommand", _args, _returns); err != nil { + log.Printf("RPC call to RegisterCommand API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) RegisterCommand(args *Z_RegisterCommandArgs, returns *Z_RegisterCommandReturns) error { + if hook, ok := s.impl.(interface { + RegisterCommand(command *model.Command) error + }); ok { + returns.A = hook.RegisterCommand(args.A) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API RegisterCommand called but not implemented.")) + } + return nil +} + +type Z_UnregisterCommandArgs struct { + A string + B 
string +} + +type Z_UnregisterCommandReturns struct { + A error +} + +func (g *apiRPCClient) UnregisterCommand(teamID, trigger string) error { + _args := &Z_UnregisterCommandArgs{teamID, trigger} + _returns := &Z_UnregisterCommandReturns{} + if err := g.client.Call("Plugin.UnregisterCommand", _args, _returns); err != nil { + log.Printf("RPC call to UnregisterCommand API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) UnregisterCommand(args *Z_UnregisterCommandArgs, returns *Z_UnregisterCommandReturns) error { + if hook, ok := s.impl.(interface { + UnregisterCommand(teamID, trigger string) error + }); ok { + returns.A = hook.UnregisterCommand(args.A, args.B) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API UnregisterCommand called but not implemented.")) + } + return nil +} + +type Z_ExecuteSlashCommandArgs struct { + A *model.CommandArgs +} + +type Z_ExecuteSlashCommandReturns struct { + A *model.CommandResponse + B error +} + +func (g *apiRPCClient) ExecuteSlashCommand(commandArgs *model.CommandArgs) (*model.CommandResponse, error) { + _args := &Z_ExecuteSlashCommandArgs{commandArgs} + _returns := &Z_ExecuteSlashCommandReturns{} + if err := g.client.Call("Plugin.ExecuteSlashCommand", _args, _returns); err != nil { + log.Printf("RPC call to ExecuteSlashCommand API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) ExecuteSlashCommand(args *Z_ExecuteSlashCommandArgs, returns *Z_ExecuteSlashCommandReturns) error { + if hook, ok := s.impl.(interface { + ExecuteSlashCommand(commandArgs *model.CommandArgs) (*model.CommandResponse, error) + }); ok { + returns.A, returns.B = hook.ExecuteSlashCommand(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API ExecuteSlashCommand called but not implemented.")) + } + return nil +} + +type Z_GetConfigArgs struct { +} + +type Z_GetConfigReturns struct { + A *model.Config +} + 
+func (g *apiRPCClient) GetConfig() *model.Config { + _args := &Z_GetConfigArgs{} + _returns := &Z_GetConfigReturns{} + if err := g.client.Call("Plugin.GetConfig", _args, _returns); err != nil { + log.Printf("RPC call to GetConfig API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetConfig(args *Z_GetConfigArgs, returns *Z_GetConfigReturns) error { + if hook, ok := s.impl.(interface { + GetConfig() *model.Config + }); ok { + returns.A = hook.GetConfig() + } else { + return encodableError(fmt.Errorf("API GetConfig called but not implemented.")) + } + return nil +} + +type Z_GetUnsanitizedConfigArgs struct { +} + +type Z_GetUnsanitizedConfigReturns struct { + A *model.Config +} + +func (g *apiRPCClient) GetUnsanitizedConfig() *model.Config { + _args := &Z_GetUnsanitizedConfigArgs{} + _returns := &Z_GetUnsanitizedConfigReturns{} + if err := g.client.Call("Plugin.GetUnsanitizedConfig", _args, _returns); err != nil { + log.Printf("RPC call to GetUnsanitizedConfig API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetUnsanitizedConfig(args *Z_GetUnsanitizedConfigArgs, returns *Z_GetUnsanitizedConfigReturns) error { + if hook, ok := s.impl.(interface { + GetUnsanitizedConfig() *model.Config + }); ok { + returns.A = hook.GetUnsanitizedConfig() + } else { + return encodableError(fmt.Errorf("API GetUnsanitizedConfig called but not implemented.")) + } + return nil +} + +type Z_SaveConfigArgs struct { + A *model.Config +} + +type Z_SaveConfigReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) SaveConfig(config *model.Config) *model.AppError { + _args := &Z_SaveConfigArgs{config} + _returns := &Z_SaveConfigReturns{} + if err := g.client.Call("Plugin.SaveConfig", _args, _returns); err != nil { + log.Printf("RPC call to SaveConfig API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) SaveConfig(args *Z_SaveConfigArgs, returns *Z_SaveConfigReturns) error { + if hook, ok := 
s.impl.(interface { + SaveConfig(config *model.Config) *model.AppError + }); ok { + returns.A = hook.SaveConfig(args.A) + } else { + return encodableError(fmt.Errorf("API SaveConfig called but not implemented.")) + } + return nil +} + +type Z_GetPluginConfigArgs struct { +} + +type Z_GetPluginConfigReturns struct { + A map[string]any +} + +func (g *apiRPCClient) GetPluginConfig() map[string]any { + _args := &Z_GetPluginConfigArgs{} + _returns := &Z_GetPluginConfigReturns{} + if err := g.client.Call("Plugin.GetPluginConfig", _args, _returns); err != nil { + log.Printf("RPC call to GetPluginConfig API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetPluginConfig(args *Z_GetPluginConfigArgs, returns *Z_GetPluginConfigReturns) error { + if hook, ok := s.impl.(interface { + GetPluginConfig() map[string]any + }); ok { + returns.A = hook.GetPluginConfig() + } else { + return encodableError(fmt.Errorf("API GetPluginConfig called but not implemented.")) + } + return nil +} + +type Z_SavePluginConfigArgs struct { + A map[string]any +} + +type Z_SavePluginConfigReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) SavePluginConfig(config map[string]any) *model.AppError { + _args := &Z_SavePluginConfigArgs{config} + _returns := &Z_SavePluginConfigReturns{} + if err := g.client.Call("Plugin.SavePluginConfig", _args, _returns); err != nil { + log.Printf("RPC call to SavePluginConfig API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) SavePluginConfig(args *Z_SavePluginConfigArgs, returns *Z_SavePluginConfigReturns) error { + if hook, ok := s.impl.(interface { + SavePluginConfig(config map[string]any) *model.AppError + }); ok { + returns.A = hook.SavePluginConfig(args.A) + } else { + return encodableError(fmt.Errorf("API SavePluginConfig called but not implemented.")) + } + return nil +} + +type Z_GetBundlePathArgs struct { +} + +type Z_GetBundlePathReturns struct { + A string + B error +} + +func (g 
*apiRPCClient) GetBundlePath() (string, error) { + _args := &Z_GetBundlePathArgs{} + _returns := &Z_GetBundlePathReturns{} + if err := g.client.Call("Plugin.GetBundlePath", _args, _returns); err != nil { + log.Printf("RPC call to GetBundlePath API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetBundlePath(args *Z_GetBundlePathArgs, returns *Z_GetBundlePathReturns) error { + if hook, ok := s.impl.(interface { + GetBundlePath() (string, error) + }); ok { + returns.A, returns.B = hook.GetBundlePath() + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API GetBundlePath called but not implemented.")) + } + return nil +} + +type Z_GetLicenseArgs struct { +} + +type Z_GetLicenseReturns struct { + A *model.License +} + +func (g *apiRPCClient) GetLicense() *model.License { + _args := &Z_GetLicenseArgs{} + _returns := &Z_GetLicenseReturns{} + if err := g.client.Call("Plugin.GetLicense", _args, _returns); err != nil { + log.Printf("RPC call to GetLicense API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetLicense(args *Z_GetLicenseArgs, returns *Z_GetLicenseReturns) error { + if hook, ok := s.impl.(interface { + GetLicense() *model.License + }); ok { + returns.A = hook.GetLicense() + } else { + return encodableError(fmt.Errorf("API GetLicense called but not implemented.")) + } + return nil +} + +type Z_IsEnterpriseReadyArgs struct { +} + +type Z_IsEnterpriseReadyReturns struct { + A bool +} + +func (g *apiRPCClient) IsEnterpriseReady() bool { + _args := &Z_IsEnterpriseReadyArgs{} + _returns := &Z_IsEnterpriseReadyReturns{} + if err := g.client.Call("Plugin.IsEnterpriseReady", _args, _returns); err != nil { + log.Printf("RPC call to IsEnterpriseReady API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) IsEnterpriseReady(args *Z_IsEnterpriseReadyArgs, returns *Z_IsEnterpriseReadyReturns) error { + if hook, ok := s.impl.(interface { + 
IsEnterpriseReady() bool + }); ok { + returns.A = hook.IsEnterpriseReady() + } else { + return encodableError(fmt.Errorf("API IsEnterpriseReady called but not implemented.")) + } + return nil +} + +type Z_GetServerVersionArgs struct { +} + +type Z_GetServerVersionReturns struct { + A string +} + +func (g *apiRPCClient) GetServerVersion() string { + _args := &Z_GetServerVersionArgs{} + _returns := &Z_GetServerVersionReturns{} + if err := g.client.Call("Plugin.GetServerVersion", _args, _returns); err != nil { + log.Printf("RPC call to GetServerVersion API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetServerVersion(args *Z_GetServerVersionArgs, returns *Z_GetServerVersionReturns) error { + if hook, ok := s.impl.(interface { + GetServerVersion() string + }); ok { + returns.A = hook.GetServerVersion() + } else { + return encodableError(fmt.Errorf("API GetServerVersion called but not implemented.")) + } + return nil +} + +type Z_GetSystemInstallDateArgs struct { +} + +type Z_GetSystemInstallDateReturns struct { + A int64 + B *model.AppError +} + +func (g *apiRPCClient) GetSystemInstallDate() (int64, *model.AppError) { + _args := &Z_GetSystemInstallDateArgs{} + _returns := &Z_GetSystemInstallDateReturns{} + if err := g.client.Call("Plugin.GetSystemInstallDate", _args, _returns); err != nil { + log.Printf("RPC call to GetSystemInstallDate API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetSystemInstallDate(args *Z_GetSystemInstallDateArgs, returns *Z_GetSystemInstallDateReturns) error { + if hook, ok := s.impl.(interface { + GetSystemInstallDate() (int64, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetSystemInstallDate() + } else { + return encodableError(fmt.Errorf("API GetSystemInstallDate called but not implemented.")) + } + return nil +} + +type Z_GetDiagnosticIdArgs struct { +} + +type Z_GetDiagnosticIdReturns struct { + A string +} + +func (g *apiRPCClient) 
GetDiagnosticId() string { + _args := &Z_GetDiagnosticIdArgs{} + _returns := &Z_GetDiagnosticIdReturns{} + if err := g.client.Call("Plugin.GetDiagnosticId", _args, _returns); err != nil { + log.Printf("RPC call to GetDiagnosticId API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetDiagnosticId(args *Z_GetDiagnosticIdArgs, returns *Z_GetDiagnosticIdReturns) error { + if hook, ok := s.impl.(interface { + GetDiagnosticId() string + }); ok { + returns.A = hook.GetDiagnosticId() + } else { + return encodableError(fmt.Errorf("API GetDiagnosticId called but not implemented.")) + } + return nil +} + +type Z_GetTelemetryIdArgs struct { +} + +type Z_GetTelemetryIdReturns struct { + A string +} + +func (g *apiRPCClient) GetTelemetryId() string { + _args := &Z_GetTelemetryIdArgs{} + _returns := &Z_GetTelemetryIdReturns{} + if err := g.client.Call("Plugin.GetTelemetryId", _args, _returns); err != nil { + log.Printf("RPC call to GetTelemetryId API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetTelemetryId(args *Z_GetTelemetryIdArgs, returns *Z_GetTelemetryIdReturns) error { + if hook, ok := s.impl.(interface { + GetTelemetryId() string + }); ok { + returns.A = hook.GetTelemetryId() + } else { + return encodableError(fmt.Errorf("API GetTelemetryId called but not implemented.")) + } + return nil +} + +type Z_CreateUserArgs struct { + A *model.User +} + +type Z_CreateUserReturns struct { + A *model.User + B *model.AppError +} + +func (g *apiRPCClient) CreateUser(user *model.User) (*model.User, *model.AppError) { + _args := &Z_CreateUserArgs{user} + _returns := &Z_CreateUserReturns{} + if err := g.client.Call("Plugin.CreateUser", _args, _returns); err != nil { + log.Printf("RPC call to CreateUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateUser(args *Z_CreateUserArgs, returns *Z_CreateUserReturns) error { + if hook, ok := s.impl.(interface { + CreateUser(user 
*model.User) (*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateUser(args.A) + } else { + return encodableError(fmt.Errorf("API CreateUser called but not implemented.")) + } + return nil +} + +type Z_DeleteUserArgs struct { + A string +} + +type Z_DeleteUserReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeleteUser(userID string) *model.AppError { + _args := &Z_DeleteUserArgs{userID} + _returns := &Z_DeleteUserReturns{} + if err := g.client.Call("Plugin.DeleteUser", _args, _returns); err != nil { + log.Printf("RPC call to DeleteUser API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeleteUser(args *Z_DeleteUserArgs, returns *Z_DeleteUserReturns) error { + if hook, ok := s.impl.(interface { + DeleteUser(userID string) *model.AppError + }); ok { + returns.A = hook.DeleteUser(args.A) + } else { + return encodableError(fmt.Errorf("API DeleteUser called but not implemented.")) + } + return nil +} + +type Z_GetUsersArgs struct { + A *model.UserGetOptions +} + +type Z_GetUsersReturns struct { + A []*model.User + B *model.AppError +} + +func (g *apiRPCClient) GetUsers(options *model.UserGetOptions) ([]*model.User, *model.AppError) { + _args := &Z_GetUsersArgs{options} + _returns := &Z_GetUsersReturns{} + if err := g.client.Call("Plugin.GetUsers", _args, _returns); err != nil { + log.Printf("RPC call to GetUsers API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUsers(args *Z_GetUsersArgs, returns *Z_GetUsersReturns) error { + if hook, ok := s.impl.(interface { + GetUsers(options *model.UserGetOptions) ([]*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUsers(args.A) + } else { + return encodableError(fmt.Errorf("API GetUsers called but not implemented.")) + } + return nil +} + +type Z_GetUsersByIdsArgs struct { + A []string +} + +type Z_GetUsersByIdsReturns struct { + A []*model.User + B *model.AppError +} + +func (g 
*apiRPCClient) GetUsersByIds(userIDs []string) ([]*model.User, *model.AppError) { + _args := &Z_GetUsersByIdsArgs{userIDs} + _returns := &Z_GetUsersByIdsReturns{} + if err := g.client.Call("Plugin.GetUsersByIds", _args, _returns); err != nil { + log.Printf("RPC call to GetUsersByIds API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUsersByIds(args *Z_GetUsersByIdsArgs, returns *Z_GetUsersByIdsReturns) error { + if hook, ok := s.impl.(interface { + GetUsersByIds(userIDs []string) ([]*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUsersByIds(args.A) + } else { + return encodableError(fmt.Errorf("API GetUsersByIds called but not implemented.")) + } + return nil +} + +type Z_GetUserArgs struct { + A string +} + +type Z_GetUserReturns struct { + A *model.User + B *model.AppError +} + +func (g *apiRPCClient) GetUser(userID string) (*model.User, *model.AppError) { + _args := &Z_GetUserArgs{userID} + _returns := &Z_GetUserReturns{} + if err := g.client.Call("Plugin.GetUser", _args, _returns); err != nil { + log.Printf("RPC call to GetUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUser(args *Z_GetUserArgs, returns *Z_GetUserReturns) error { + if hook, ok := s.impl.(interface { + GetUser(userID string) (*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUser(args.A) + } else { + return encodableError(fmt.Errorf("API GetUser called but not implemented.")) + } + return nil +} + +type Z_GetUserByEmailArgs struct { + A string +} + +type Z_GetUserByEmailReturns struct { + A *model.User + B *model.AppError +} + +func (g *apiRPCClient) GetUserByEmail(email string) (*model.User, *model.AppError) { + _args := &Z_GetUserByEmailArgs{email} + _returns := &Z_GetUserByEmailReturns{} + if err := g.client.Call("Plugin.GetUserByEmail", _args, _returns); err != nil { + log.Printf("RPC call to GetUserByEmail API failed: %s", err.Error()) + } + 
return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUserByEmail(args *Z_GetUserByEmailArgs, returns *Z_GetUserByEmailReturns) error { + if hook, ok := s.impl.(interface { + GetUserByEmail(email string) (*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUserByEmail(args.A) + } else { + return encodableError(fmt.Errorf("API GetUserByEmail called but not implemented.")) + } + return nil +} + +type Z_GetUserByUsernameArgs struct { + A string +} + +type Z_GetUserByUsernameReturns struct { + A *model.User + B *model.AppError +} + +func (g *apiRPCClient) GetUserByUsername(name string) (*model.User, *model.AppError) { + _args := &Z_GetUserByUsernameArgs{name} + _returns := &Z_GetUserByUsernameReturns{} + if err := g.client.Call("Plugin.GetUserByUsername", _args, _returns); err != nil { + log.Printf("RPC call to GetUserByUsername API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUserByUsername(args *Z_GetUserByUsernameArgs, returns *Z_GetUserByUsernameReturns) error { + if hook, ok := s.impl.(interface { + GetUserByUsername(name string) (*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUserByUsername(args.A) + } else { + return encodableError(fmt.Errorf("API GetUserByUsername called but not implemented.")) + } + return nil +} + +type Z_GetUsersByUsernamesArgs struct { + A []string +} + +type Z_GetUsersByUsernamesReturns struct { + A []*model.User + B *model.AppError +} + +func (g *apiRPCClient) GetUsersByUsernames(usernames []string) ([]*model.User, *model.AppError) { + _args := &Z_GetUsersByUsernamesArgs{usernames} + _returns := &Z_GetUsersByUsernamesReturns{} + if err := g.client.Call("Plugin.GetUsersByUsernames", _args, _returns); err != nil { + log.Printf("RPC call to GetUsersByUsernames API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUsersByUsernames(args *Z_GetUsersByUsernamesArgs, returns 
*Z_GetUsersByUsernamesReturns) error { + if hook, ok := s.impl.(interface { + GetUsersByUsernames(usernames []string) ([]*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUsersByUsernames(args.A) + } else { + return encodableError(fmt.Errorf("API GetUsersByUsernames called but not implemented.")) + } + return nil +} + +type Z_GetUsersInTeamArgs struct { + A string + B int + C int +} + +type Z_GetUsersInTeamReturns struct { + A []*model.User + B *model.AppError +} + +func (g *apiRPCClient) GetUsersInTeam(teamID string, page int, perPage int) ([]*model.User, *model.AppError) { + _args := &Z_GetUsersInTeamArgs{teamID, page, perPage} + _returns := &Z_GetUsersInTeamReturns{} + if err := g.client.Call("Plugin.GetUsersInTeam", _args, _returns); err != nil { + log.Printf("RPC call to GetUsersInTeam API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUsersInTeam(args *Z_GetUsersInTeamArgs, returns *Z_GetUsersInTeamReturns) error { + if hook, ok := s.impl.(interface { + GetUsersInTeam(teamID string, page int, perPage int) ([]*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUsersInTeam(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetUsersInTeam called but not implemented.")) + } + return nil +} + +type Z_GetPreferenceForUserArgs struct { + A string + B string + C string +} + +type Z_GetPreferenceForUserReturns struct { + A model.Preference + B *model.AppError +} + +func (g *apiRPCClient) GetPreferenceForUser(userID, category, name string) (model.Preference, *model.AppError) { + _args := &Z_GetPreferenceForUserArgs{userID, category, name} + _returns := &Z_GetPreferenceForUserReturns{} + if err := g.client.Call("Plugin.GetPreferenceForUser", _args, _returns); err != nil { + log.Printf("RPC call to GetPreferenceForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPreferenceForUser(args 
*Z_GetPreferenceForUserArgs, returns *Z_GetPreferenceForUserReturns) error { + if hook, ok := s.impl.(interface { + GetPreferenceForUser(userID, category, name string) (model.Preference, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPreferenceForUser(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetPreferenceForUser called but not implemented.")) + } + return nil +} + +type Z_GetPreferencesForUserArgs struct { + A string +} + +type Z_GetPreferencesForUserReturns struct { + A []model.Preference + B *model.AppError +} + +func (g *apiRPCClient) GetPreferencesForUser(userID string) ([]model.Preference, *model.AppError) { + _args := &Z_GetPreferencesForUserArgs{userID} + _returns := &Z_GetPreferencesForUserReturns{} + if err := g.client.Call("Plugin.GetPreferencesForUser", _args, _returns); err != nil { + log.Printf("RPC call to GetPreferencesForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPreferencesForUser(args *Z_GetPreferencesForUserArgs, returns *Z_GetPreferencesForUserReturns) error { + if hook, ok := s.impl.(interface { + GetPreferencesForUser(userID string) ([]model.Preference, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPreferencesForUser(args.A) + } else { + return encodableError(fmt.Errorf("API GetPreferencesForUser called but not implemented.")) + } + return nil +} + +type Z_UpdatePreferencesForUserArgs struct { + A string + B []model.Preference +} + +type Z_UpdatePreferencesForUserReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) UpdatePreferencesForUser(userID string, preferences []model.Preference) *model.AppError { + _args := &Z_UpdatePreferencesForUserArgs{userID, preferences} + _returns := &Z_UpdatePreferencesForUserReturns{} + if err := g.client.Call("Plugin.UpdatePreferencesForUser", _args, _returns); err != nil { + log.Printf("RPC call to UpdatePreferencesForUser API failed: %s", err.Error()) + } + return _returns.A 
+} + +func (s *apiRPCServer) UpdatePreferencesForUser(args *Z_UpdatePreferencesForUserArgs, returns *Z_UpdatePreferencesForUserReturns) error { + if hook, ok := s.impl.(interface { + UpdatePreferencesForUser(userID string, preferences []model.Preference) *model.AppError + }); ok { + returns.A = hook.UpdatePreferencesForUser(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API UpdatePreferencesForUser called but not implemented.")) + } + return nil +} + +type Z_DeletePreferencesForUserArgs struct { + A string + B []model.Preference +} + +type Z_DeletePreferencesForUserReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeletePreferencesForUser(userID string, preferences []model.Preference) *model.AppError { + _args := &Z_DeletePreferencesForUserArgs{userID, preferences} + _returns := &Z_DeletePreferencesForUserReturns{} + if err := g.client.Call("Plugin.DeletePreferencesForUser", _args, _returns); err != nil { + log.Printf("RPC call to DeletePreferencesForUser API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeletePreferencesForUser(args *Z_DeletePreferencesForUserArgs, returns *Z_DeletePreferencesForUserReturns) error { + if hook, ok := s.impl.(interface { + DeletePreferencesForUser(userID string, preferences []model.Preference) *model.AppError + }); ok { + returns.A = hook.DeletePreferencesForUser(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API DeletePreferencesForUser called but not implemented.")) + } + return nil +} + +type Z_GetSessionArgs struct { + A string +} + +type Z_GetSessionReturns struct { + A *model.Session + B *model.AppError +} + +func (g *apiRPCClient) GetSession(sessionID string) (*model.Session, *model.AppError) { + _args := &Z_GetSessionArgs{sessionID} + _returns := &Z_GetSessionReturns{} + if err := g.client.Call("Plugin.GetSession", _args, _returns); err != nil { + log.Printf("RPC call to GetSession API failed: %s", err.Error()) + } + return _returns.A, 
_returns.B +} + +func (s *apiRPCServer) GetSession(args *Z_GetSessionArgs, returns *Z_GetSessionReturns) error { + if hook, ok := s.impl.(interface { + GetSession(sessionID string) (*model.Session, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetSession(args.A) + } else { + return encodableError(fmt.Errorf("API GetSession called but not implemented.")) + } + return nil +} + +type Z_CreateSessionArgs struct { + A *model.Session +} + +type Z_CreateSessionReturns struct { + A *model.Session + B *model.AppError +} + +func (g *apiRPCClient) CreateSession(session *model.Session) (*model.Session, *model.AppError) { + _args := &Z_CreateSessionArgs{session} + _returns := &Z_CreateSessionReturns{} + if err := g.client.Call("Plugin.CreateSession", _args, _returns); err != nil { + log.Printf("RPC call to CreateSession API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateSession(args *Z_CreateSessionArgs, returns *Z_CreateSessionReturns) error { + if hook, ok := s.impl.(interface { + CreateSession(session *model.Session) (*model.Session, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateSession(args.A) + } else { + return encodableError(fmt.Errorf("API CreateSession called but not implemented.")) + } + return nil +} + +type Z_ExtendSessionExpiryArgs struct { + A string + B int64 +} + +type Z_ExtendSessionExpiryReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) ExtendSessionExpiry(sessionID string, newExpiry int64) *model.AppError { + _args := &Z_ExtendSessionExpiryArgs{sessionID, newExpiry} + _returns := &Z_ExtendSessionExpiryReturns{} + if err := g.client.Call("Plugin.ExtendSessionExpiry", _args, _returns); err != nil { + log.Printf("RPC call to ExtendSessionExpiry API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) ExtendSessionExpiry(args *Z_ExtendSessionExpiryArgs, returns *Z_ExtendSessionExpiryReturns) error { + if hook, ok := s.impl.(interface { + 
ExtendSessionExpiry(sessionID string, newExpiry int64) *model.AppError + }); ok { + returns.A = hook.ExtendSessionExpiry(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API ExtendSessionExpiry called but not implemented.")) + } + return nil +} + +type Z_RevokeSessionArgs struct { + A string +} + +type Z_RevokeSessionReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) RevokeSession(sessionID string) *model.AppError { + _args := &Z_RevokeSessionArgs{sessionID} + _returns := &Z_RevokeSessionReturns{} + if err := g.client.Call("Plugin.RevokeSession", _args, _returns); err != nil { + log.Printf("RPC call to RevokeSession API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) RevokeSession(args *Z_RevokeSessionArgs, returns *Z_RevokeSessionReturns) error { + if hook, ok := s.impl.(interface { + RevokeSession(sessionID string) *model.AppError + }); ok { + returns.A = hook.RevokeSession(args.A) + } else { + return encodableError(fmt.Errorf("API RevokeSession called but not implemented.")) + } + return nil +} + +type Z_CreateUserAccessTokenArgs struct { + A *model.UserAccessToken +} + +type Z_CreateUserAccessTokenReturns struct { + A *model.UserAccessToken + B *model.AppError +} + +func (g *apiRPCClient) CreateUserAccessToken(token *model.UserAccessToken) (*model.UserAccessToken, *model.AppError) { + _args := &Z_CreateUserAccessTokenArgs{token} + _returns := &Z_CreateUserAccessTokenReturns{} + if err := g.client.Call("Plugin.CreateUserAccessToken", _args, _returns); err != nil { + log.Printf("RPC call to CreateUserAccessToken API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateUserAccessToken(args *Z_CreateUserAccessTokenArgs, returns *Z_CreateUserAccessTokenReturns) error { + if hook, ok := s.impl.(interface { + CreateUserAccessToken(token *model.UserAccessToken) (*model.UserAccessToken, *model.AppError) + }); ok { + returns.A, returns.B = 
hook.CreateUserAccessToken(args.A) + } else { + return encodableError(fmt.Errorf("API CreateUserAccessToken called but not implemented.")) + } + return nil +} + +type Z_RevokeUserAccessTokenArgs struct { + A string +} + +type Z_RevokeUserAccessTokenReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) RevokeUserAccessToken(tokenID string) *model.AppError { + _args := &Z_RevokeUserAccessTokenArgs{tokenID} + _returns := &Z_RevokeUserAccessTokenReturns{} + if err := g.client.Call("Plugin.RevokeUserAccessToken", _args, _returns); err != nil { + log.Printf("RPC call to RevokeUserAccessToken API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) RevokeUserAccessToken(args *Z_RevokeUserAccessTokenArgs, returns *Z_RevokeUserAccessTokenReturns) error { + if hook, ok := s.impl.(interface { + RevokeUserAccessToken(tokenID string) *model.AppError + }); ok { + returns.A = hook.RevokeUserAccessToken(args.A) + } else { + return encodableError(fmt.Errorf("API RevokeUserAccessToken called but not implemented.")) + } + return nil +} + +type Z_GetTeamIconArgs struct { + A string +} + +type Z_GetTeamIconReturns struct { + A []byte + B *model.AppError +} + +func (g *apiRPCClient) GetTeamIcon(teamID string) ([]byte, *model.AppError) { + _args := &Z_GetTeamIconArgs{teamID} + _returns := &Z_GetTeamIconReturns{} + if err := g.client.Call("Plugin.GetTeamIcon", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamIcon API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamIcon(args *Z_GetTeamIconArgs, returns *Z_GetTeamIconReturns) error { + if hook, ok := s.impl.(interface { + GetTeamIcon(teamID string) ([]byte, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamIcon(args.A) + } else { + return encodableError(fmt.Errorf("API GetTeamIcon called but not implemented.")) + } + return nil +} + +type Z_SetTeamIconArgs struct { + A string + B []byte +} + +type Z_SetTeamIconReturns struct 
{ + A *model.AppError +} + +func (g *apiRPCClient) SetTeamIcon(teamID string, data []byte) *model.AppError { + _args := &Z_SetTeamIconArgs{teamID, data} + _returns := &Z_SetTeamIconReturns{} + if err := g.client.Call("Plugin.SetTeamIcon", _args, _returns); err != nil { + log.Printf("RPC call to SetTeamIcon API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) SetTeamIcon(args *Z_SetTeamIconArgs, returns *Z_SetTeamIconReturns) error { + if hook, ok := s.impl.(interface { + SetTeamIcon(teamID string, data []byte) *model.AppError + }); ok { + returns.A = hook.SetTeamIcon(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API SetTeamIcon called but not implemented.")) + } + return nil +} + +type Z_RemoveTeamIconArgs struct { + A string +} + +type Z_RemoveTeamIconReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) RemoveTeamIcon(teamID string) *model.AppError { + _args := &Z_RemoveTeamIconArgs{teamID} + _returns := &Z_RemoveTeamIconReturns{} + if err := g.client.Call("Plugin.RemoveTeamIcon", _args, _returns); err != nil { + log.Printf("RPC call to RemoveTeamIcon API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) RemoveTeamIcon(args *Z_RemoveTeamIconArgs, returns *Z_RemoveTeamIconReturns) error { + if hook, ok := s.impl.(interface { + RemoveTeamIcon(teamID string) *model.AppError + }); ok { + returns.A = hook.RemoveTeamIcon(args.A) + } else { + return encodableError(fmt.Errorf("API RemoveTeamIcon called but not implemented.")) + } + return nil +} + +type Z_UpdateUserArgs struct { + A *model.User +} + +type Z_UpdateUserReturns struct { + A *model.User + B *model.AppError +} + +func (g *apiRPCClient) UpdateUser(user *model.User) (*model.User, *model.AppError) { + _args := &Z_UpdateUserArgs{user} + _returns := &Z_UpdateUserReturns{} + if err := g.client.Call("Plugin.UpdateUser", _args, _returns); err != nil { + log.Printf("RPC call to UpdateUser API failed: %s", err.Error()) + } + return 
_returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateUser(args *Z_UpdateUserArgs, returns *Z_UpdateUserReturns) error { + if hook, ok := s.impl.(interface { + UpdateUser(user *model.User) (*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateUser(args.A) + } else { + return encodableError(fmt.Errorf("API UpdateUser called but not implemented.")) + } + return nil +} + +type Z_GetUserStatusArgs struct { + A string +} + +type Z_GetUserStatusReturns struct { + A *model.Status + B *model.AppError +} + +func (g *apiRPCClient) GetUserStatus(userID string) (*model.Status, *model.AppError) { + _args := &Z_GetUserStatusArgs{userID} + _returns := &Z_GetUserStatusReturns{} + if err := g.client.Call("Plugin.GetUserStatus", _args, _returns); err != nil { + log.Printf("RPC call to GetUserStatus API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUserStatus(args *Z_GetUserStatusArgs, returns *Z_GetUserStatusReturns) error { + if hook, ok := s.impl.(interface { + GetUserStatus(userID string) (*model.Status, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUserStatus(args.A) + } else { + return encodableError(fmt.Errorf("API GetUserStatus called but not implemented.")) + } + return nil +} + +type Z_GetUserStatusesByIdsArgs struct { + A []string +} + +type Z_GetUserStatusesByIdsReturns struct { + A []*model.Status + B *model.AppError +} + +func (g *apiRPCClient) GetUserStatusesByIds(userIds []string) ([]*model.Status, *model.AppError) { + _args := &Z_GetUserStatusesByIdsArgs{userIds} + _returns := &Z_GetUserStatusesByIdsReturns{} + if err := g.client.Call("Plugin.GetUserStatusesByIds", _args, _returns); err != nil { + log.Printf("RPC call to GetUserStatusesByIds API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUserStatusesByIds(args *Z_GetUserStatusesByIdsArgs, returns *Z_GetUserStatusesByIdsReturns) error { + if hook, ok := s.impl.(interface { + 
GetUserStatusesByIds(userIds []string) ([]*model.Status, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetUserStatusesByIds(args.A) + } else { + return encodableError(fmt.Errorf("API GetUserStatusesByIds called but not implemented.")) + } + return nil +} + +type Z_UpdateUserStatusArgs struct { + A string + B string +} + +type Z_UpdateUserStatusReturns struct { + A *model.Status + B *model.AppError +} + +func (g *apiRPCClient) UpdateUserStatus(userID, status string) (*model.Status, *model.AppError) { + _args := &Z_UpdateUserStatusArgs{userID, status} + _returns := &Z_UpdateUserStatusReturns{} + if err := g.client.Call("Plugin.UpdateUserStatus", _args, _returns); err != nil { + log.Printf("RPC call to UpdateUserStatus API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateUserStatus(args *Z_UpdateUserStatusArgs, returns *Z_UpdateUserStatusReturns) error { + if hook, ok := s.impl.(interface { + UpdateUserStatus(userID, status string) (*model.Status, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateUserStatus(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API UpdateUserStatus called but not implemented.")) + } + return nil +} + +type Z_SetUserStatusTimedDNDArgs struct { + A string + B int64 +} + +type Z_SetUserStatusTimedDNDReturns struct { + A *model.Status + B *model.AppError +} + +func (g *apiRPCClient) SetUserStatusTimedDND(userId string, endtime int64) (*model.Status, *model.AppError) { + _args := &Z_SetUserStatusTimedDNDArgs{userId, endtime} + _returns := &Z_SetUserStatusTimedDNDReturns{} + if err := g.client.Call("Plugin.SetUserStatusTimedDND", _args, _returns); err != nil { + log.Printf("RPC call to SetUserStatusTimedDND API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SetUserStatusTimedDND(args *Z_SetUserStatusTimedDNDArgs, returns *Z_SetUserStatusTimedDNDReturns) error { + if hook, ok := s.impl.(interface { + 
SetUserStatusTimedDND(userId string, endtime int64) (*model.Status, *model.AppError) + }); ok { + returns.A, returns.B = hook.SetUserStatusTimedDND(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API SetUserStatusTimedDND called but not implemented.")) + } + return nil +} + +type Z_UpdateUserActiveArgs struct { + A string + B bool +} + +type Z_UpdateUserActiveReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) UpdateUserActive(userID string, active bool) *model.AppError { + _args := &Z_UpdateUserActiveArgs{userID, active} + _returns := &Z_UpdateUserActiveReturns{} + if err := g.client.Call("Plugin.UpdateUserActive", _args, _returns); err != nil { + log.Printf("RPC call to UpdateUserActive API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) UpdateUserActive(args *Z_UpdateUserActiveArgs, returns *Z_UpdateUserActiveReturns) error { + if hook, ok := s.impl.(interface { + UpdateUserActive(userID string, active bool) *model.AppError + }); ok { + returns.A = hook.UpdateUserActive(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API UpdateUserActive called but not implemented.")) + } + return nil +} + +type Z_UpdateUserCustomStatusArgs struct { + A string + B *model.CustomStatus +} + +type Z_UpdateUserCustomStatusReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) UpdateUserCustomStatus(userID string, customStatus *model.CustomStatus) *model.AppError { + _args := &Z_UpdateUserCustomStatusArgs{userID, customStatus} + _returns := &Z_UpdateUserCustomStatusReturns{} + if err := g.client.Call("Plugin.UpdateUserCustomStatus", _args, _returns); err != nil { + log.Printf("RPC call to UpdateUserCustomStatus API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) UpdateUserCustomStatus(args *Z_UpdateUserCustomStatusArgs, returns *Z_UpdateUserCustomStatusReturns) error { + if hook, ok := s.impl.(interface { + UpdateUserCustomStatus(userID string, customStatus 
*model.CustomStatus) *model.AppError + }); ok { + returns.A = hook.UpdateUserCustomStatus(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API UpdateUserCustomStatus called but not implemented.")) + } + return nil +} + +type Z_RemoveUserCustomStatusArgs struct { + A string +} + +type Z_RemoveUserCustomStatusReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) RemoveUserCustomStatus(userID string) *model.AppError { + _args := &Z_RemoveUserCustomStatusArgs{userID} + _returns := &Z_RemoveUserCustomStatusReturns{} + if err := g.client.Call("Plugin.RemoveUserCustomStatus", _args, _returns); err != nil { + log.Printf("RPC call to RemoveUserCustomStatus API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) RemoveUserCustomStatus(args *Z_RemoveUserCustomStatusArgs, returns *Z_RemoveUserCustomStatusReturns) error { + if hook, ok := s.impl.(interface { + RemoveUserCustomStatus(userID string) *model.AppError + }); ok { + returns.A = hook.RemoveUserCustomStatus(args.A) + } else { + return encodableError(fmt.Errorf("API RemoveUserCustomStatus called but not implemented.")) + } + return nil +} + +type Z_GetUsersInChannelArgs struct { + A string + B string + C int + D int +} + +type Z_GetUsersInChannelReturns struct { + A []*model.User + B *model.AppError +} + +func (g *apiRPCClient) GetUsersInChannel(channelID, sortBy string, page, perPage int) ([]*model.User, *model.AppError) { + _args := &Z_GetUsersInChannelArgs{channelID, sortBy, page, perPage} + _returns := &Z_GetUsersInChannelReturns{} + if err := g.client.Call("Plugin.GetUsersInChannel", _args, _returns); err != nil { + log.Printf("RPC call to GetUsersInChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetUsersInChannel(args *Z_GetUsersInChannelArgs, returns *Z_GetUsersInChannelReturns) error { + if hook, ok := s.impl.(interface { + GetUsersInChannel(channelID, sortBy string, page, perPage int) ([]*model.User, 
*model.AppError) + }); ok { + returns.A, returns.B = hook.GetUsersInChannel(args.A, args.B, args.C, args.D) + } else { + return encodableError(fmt.Errorf("API GetUsersInChannel called but not implemented.")) + } + return nil +} + +type Z_GetLDAPUserAttributesArgs struct { + A string + B []string +} + +type Z_GetLDAPUserAttributesReturns struct { + A map[string]string + B *model.AppError +} + +func (g *apiRPCClient) GetLDAPUserAttributes(userID string, attributes []string) (map[string]string, *model.AppError) { + _args := &Z_GetLDAPUserAttributesArgs{userID, attributes} + _returns := &Z_GetLDAPUserAttributesReturns{} + if err := g.client.Call("Plugin.GetLDAPUserAttributes", _args, _returns); err != nil { + log.Printf("RPC call to GetLDAPUserAttributes API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetLDAPUserAttributes(args *Z_GetLDAPUserAttributesArgs, returns *Z_GetLDAPUserAttributesReturns) error { + if hook, ok := s.impl.(interface { + GetLDAPUserAttributes(userID string, attributes []string) (map[string]string, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetLDAPUserAttributes(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetLDAPUserAttributes called but not implemented.")) + } + return nil +} + +type Z_CreateTeamArgs struct { + A *model.Team +} + +type Z_CreateTeamReturns struct { + A *model.Team + B *model.AppError +} + +func (g *apiRPCClient) CreateTeam(team *model.Team) (*model.Team, *model.AppError) { + _args := &Z_CreateTeamArgs{team} + _returns := &Z_CreateTeamReturns{} + if err := g.client.Call("Plugin.CreateTeam", _args, _returns); err != nil { + log.Printf("RPC call to CreateTeam API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateTeam(args *Z_CreateTeamArgs, returns *Z_CreateTeamReturns) error { + if hook, ok := s.impl.(interface { + CreateTeam(team *model.Team) (*model.Team, *model.AppError) + }); ok { + returns.A, 
returns.B = hook.CreateTeam(args.A) + } else { + return encodableError(fmt.Errorf("API CreateTeam called but not implemented.")) + } + return nil +} + +type Z_DeleteTeamArgs struct { + A string +} + +type Z_DeleteTeamReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeleteTeam(teamID string) *model.AppError { + _args := &Z_DeleteTeamArgs{teamID} + _returns := &Z_DeleteTeamReturns{} + if err := g.client.Call("Plugin.DeleteTeam", _args, _returns); err != nil { + log.Printf("RPC call to DeleteTeam API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeleteTeam(args *Z_DeleteTeamArgs, returns *Z_DeleteTeamReturns) error { + if hook, ok := s.impl.(interface { + DeleteTeam(teamID string) *model.AppError + }); ok { + returns.A = hook.DeleteTeam(args.A) + } else { + return encodableError(fmt.Errorf("API DeleteTeam called but not implemented.")) + } + return nil +} + +type Z_GetTeamsArgs struct { +} + +type Z_GetTeamsReturns struct { + A []*model.Team + B *model.AppError +} + +func (g *apiRPCClient) GetTeams() ([]*model.Team, *model.AppError) { + _args := &Z_GetTeamsArgs{} + _returns := &Z_GetTeamsReturns{} + if err := g.client.Call("Plugin.GetTeams", _args, _returns); err != nil { + log.Printf("RPC call to GetTeams API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeams(args *Z_GetTeamsArgs, returns *Z_GetTeamsReturns) error { + if hook, ok := s.impl.(interface { + GetTeams() ([]*model.Team, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeams() + } else { + return encodableError(fmt.Errorf("API GetTeams called but not implemented.")) + } + return nil +} + +type Z_GetTeamArgs struct { + A string +} + +type Z_GetTeamReturns struct { + A *model.Team + B *model.AppError +} + +func (g *apiRPCClient) GetTeam(teamID string) (*model.Team, *model.AppError) { + _args := &Z_GetTeamArgs{teamID} + _returns := &Z_GetTeamReturns{} + if err := g.client.Call("Plugin.GetTeam", _args, 
_returns); err != nil { + log.Printf("RPC call to GetTeam API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeam(args *Z_GetTeamArgs, returns *Z_GetTeamReturns) error { + if hook, ok := s.impl.(interface { + GetTeam(teamID string) (*model.Team, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeam(args.A) + } else { + return encodableError(fmt.Errorf("API GetTeam called but not implemented.")) + } + return nil +} + +type Z_GetTeamByNameArgs struct { + A string +} + +type Z_GetTeamByNameReturns struct { + A *model.Team + B *model.AppError +} + +func (g *apiRPCClient) GetTeamByName(name string) (*model.Team, *model.AppError) { + _args := &Z_GetTeamByNameArgs{name} + _returns := &Z_GetTeamByNameReturns{} + if err := g.client.Call("Plugin.GetTeamByName", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamByName API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamByName(args *Z_GetTeamByNameArgs, returns *Z_GetTeamByNameReturns) error { + if hook, ok := s.impl.(interface { + GetTeamByName(name string) (*model.Team, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamByName(args.A) + } else { + return encodableError(fmt.Errorf("API GetTeamByName called but not implemented.")) + } + return nil +} + +type Z_GetTeamsUnreadForUserArgs struct { + A string +} + +type Z_GetTeamsUnreadForUserReturns struct { + A []*model.TeamUnread + B *model.AppError +} + +func (g *apiRPCClient) GetTeamsUnreadForUser(userID string) ([]*model.TeamUnread, *model.AppError) { + _args := &Z_GetTeamsUnreadForUserArgs{userID} + _returns := &Z_GetTeamsUnreadForUserReturns{} + if err := g.client.Call("Plugin.GetTeamsUnreadForUser", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamsUnreadForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamsUnreadForUser(args *Z_GetTeamsUnreadForUserArgs, returns 
*Z_GetTeamsUnreadForUserReturns) error { + if hook, ok := s.impl.(interface { + GetTeamsUnreadForUser(userID string) ([]*model.TeamUnread, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamsUnreadForUser(args.A) + } else { + return encodableError(fmt.Errorf("API GetTeamsUnreadForUser called but not implemented.")) + } + return nil +} + +type Z_UpdateTeamArgs struct { + A *model.Team +} + +type Z_UpdateTeamReturns struct { + A *model.Team + B *model.AppError +} + +func (g *apiRPCClient) UpdateTeam(team *model.Team) (*model.Team, *model.AppError) { + _args := &Z_UpdateTeamArgs{team} + _returns := &Z_UpdateTeamReturns{} + if err := g.client.Call("Plugin.UpdateTeam", _args, _returns); err != nil { + log.Printf("RPC call to UpdateTeam API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateTeam(args *Z_UpdateTeamArgs, returns *Z_UpdateTeamReturns) error { + if hook, ok := s.impl.(interface { + UpdateTeam(team *model.Team) (*model.Team, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateTeam(args.A) + } else { + return encodableError(fmt.Errorf("API UpdateTeam called but not implemented.")) + } + return nil +} + +type Z_SearchTeamsArgs struct { + A string +} + +type Z_SearchTeamsReturns struct { + A []*model.Team + B *model.AppError +} + +func (g *apiRPCClient) SearchTeams(term string) ([]*model.Team, *model.AppError) { + _args := &Z_SearchTeamsArgs{term} + _returns := &Z_SearchTeamsReturns{} + if err := g.client.Call("Plugin.SearchTeams", _args, _returns); err != nil { + log.Printf("RPC call to SearchTeams API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SearchTeams(args *Z_SearchTeamsArgs, returns *Z_SearchTeamsReturns) error { + if hook, ok := s.impl.(interface { + SearchTeams(term string) ([]*model.Team, *model.AppError) + }); ok { + returns.A, returns.B = hook.SearchTeams(args.A) + } else { + return encodableError(fmt.Errorf("API SearchTeams called 
but not implemented.")) + } + return nil +} + +type Z_GetTeamsForUserArgs struct { + A string +} + +type Z_GetTeamsForUserReturns struct { + A []*model.Team + B *model.AppError +} + +func (g *apiRPCClient) GetTeamsForUser(userID string) ([]*model.Team, *model.AppError) { + _args := &Z_GetTeamsForUserArgs{userID} + _returns := &Z_GetTeamsForUserReturns{} + if err := g.client.Call("Plugin.GetTeamsForUser", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamsForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamsForUser(args *Z_GetTeamsForUserArgs, returns *Z_GetTeamsForUserReturns) error { + if hook, ok := s.impl.(interface { + GetTeamsForUser(userID string) ([]*model.Team, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamsForUser(args.A) + } else { + return encodableError(fmt.Errorf("API GetTeamsForUser called but not implemented.")) + } + return nil +} + +type Z_CreateTeamMemberArgs struct { + A string + B string +} + +type Z_CreateTeamMemberReturns struct { + A *model.TeamMember + B *model.AppError +} + +func (g *apiRPCClient) CreateTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) { + _args := &Z_CreateTeamMemberArgs{teamID, userID} + _returns := &Z_CreateTeamMemberReturns{} + if err := g.client.Call("Plugin.CreateTeamMember", _args, _returns); err != nil { + log.Printf("RPC call to CreateTeamMember API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateTeamMember(args *Z_CreateTeamMemberArgs, returns *Z_CreateTeamMemberReturns) error { + if hook, ok := s.impl.(interface { + CreateTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateTeamMember(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API CreateTeamMember called but not implemented.")) + } + return nil +} + +type Z_CreateTeamMembersArgs struct { + A string + B []string + C string 
+} + +type Z_CreateTeamMembersReturns struct { + A []*model.TeamMember + B *model.AppError +} + +func (g *apiRPCClient) CreateTeamMembers(teamID string, userIds []string, requestorId string) ([]*model.TeamMember, *model.AppError) { + _args := &Z_CreateTeamMembersArgs{teamID, userIds, requestorId} + _returns := &Z_CreateTeamMembersReturns{} + if err := g.client.Call("Plugin.CreateTeamMembers", _args, _returns); err != nil { + log.Printf("RPC call to CreateTeamMembers API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateTeamMembers(args *Z_CreateTeamMembersArgs, returns *Z_CreateTeamMembersReturns) error { + if hook, ok := s.impl.(interface { + CreateTeamMembers(teamID string, userIds []string, requestorId string) ([]*model.TeamMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateTeamMembers(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API CreateTeamMembers called but not implemented.")) + } + return nil +} + +type Z_CreateTeamMembersGracefullyArgs struct { + A string + B []string + C string +} + +type Z_CreateTeamMembersGracefullyReturns struct { + A []*model.TeamMemberWithError + B *model.AppError +} + +func (g *apiRPCClient) CreateTeamMembersGracefully(teamID string, userIds []string, requestorId string) ([]*model.TeamMemberWithError, *model.AppError) { + _args := &Z_CreateTeamMembersGracefullyArgs{teamID, userIds, requestorId} + _returns := &Z_CreateTeamMembersGracefullyReturns{} + if err := g.client.Call("Plugin.CreateTeamMembersGracefully", _args, _returns); err != nil { + log.Printf("RPC call to CreateTeamMembersGracefully API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateTeamMembersGracefully(args *Z_CreateTeamMembersGracefullyArgs, returns *Z_CreateTeamMembersGracefullyReturns) error { + if hook, ok := s.impl.(interface { + CreateTeamMembersGracefully(teamID string, userIds []string, requestorId string) 
([]*model.TeamMemberWithError, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateTeamMembersGracefully(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API CreateTeamMembersGracefully called but not implemented.")) + } + return nil +} + +type Z_DeleteTeamMemberArgs struct { + A string + B string + C string +} + +type Z_DeleteTeamMemberReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeleteTeamMember(teamID, userID, requestorId string) *model.AppError { + _args := &Z_DeleteTeamMemberArgs{teamID, userID, requestorId} + _returns := &Z_DeleteTeamMemberReturns{} + if err := g.client.Call("Plugin.DeleteTeamMember", _args, _returns); err != nil { + log.Printf("RPC call to DeleteTeamMember API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeleteTeamMember(args *Z_DeleteTeamMemberArgs, returns *Z_DeleteTeamMemberReturns) error { + if hook, ok := s.impl.(interface { + DeleteTeamMember(teamID, userID, requestorId string) *model.AppError + }); ok { + returns.A = hook.DeleteTeamMember(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API DeleteTeamMember called but not implemented.")) + } + return nil +} + +type Z_GetTeamMembersArgs struct { + A string + B int + C int +} + +type Z_GetTeamMembersReturns struct { + A []*model.TeamMember + B *model.AppError +} + +func (g *apiRPCClient) GetTeamMembers(teamID string, page, perPage int) ([]*model.TeamMember, *model.AppError) { + _args := &Z_GetTeamMembersArgs{teamID, page, perPage} + _returns := &Z_GetTeamMembersReturns{} + if err := g.client.Call("Plugin.GetTeamMembers", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamMembers API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamMembers(args *Z_GetTeamMembersArgs, returns *Z_GetTeamMembersReturns) error { + if hook, ok := s.impl.(interface { + GetTeamMembers(teamID string, page, perPage int) ([]*model.TeamMember, 
*model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamMembers(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetTeamMembers called but not implemented.")) + } + return nil +} + +type Z_GetTeamMemberArgs struct { + A string + B string +} + +type Z_GetTeamMemberReturns struct { + A *model.TeamMember + B *model.AppError +} + +func (g *apiRPCClient) GetTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) { + _args := &Z_GetTeamMemberArgs{teamID, userID} + _returns := &Z_GetTeamMemberReturns{} + if err := g.client.Call("Plugin.GetTeamMember", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamMember API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamMember(args *Z_GetTeamMemberArgs, returns *Z_GetTeamMemberReturns) error { + if hook, ok := s.impl.(interface { + GetTeamMember(teamID, userID string) (*model.TeamMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamMember(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetTeamMember called but not implemented.")) + } + return nil +} + +type Z_GetTeamMembersForUserArgs struct { + A string + B int + C int +} + +type Z_GetTeamMembersForUserReturns struct { + A []*model.TeamMember + B *model.AppError +} + +func (g *apiRPCClient) GetTeamMembersForUser(userID string, page int, perPage int) ([]*model.TeamMember, *model.AppError) { + _args := &Z_GetTeamMembersForUserArgs{userID, page, perPage} + _returns := &Z_GetTeamMembersForUserReturns{} + if err := g.client.Call("Plugin.GetTeamMembersForUser", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamMembersForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamMembersForUser(args *Z_GetTeamMembersForUserArgs, returns *Z_GetTeamMembersForUserReturns) error { + if hook, ok := s.impl.(interface { + GetTeamMembersForUser(userID string, page int, perPage int) 
([]*model.TeamMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamMembersForUser(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetTeamMembersForUser called but not implemented.")) + } + return nil +} + +type Z_UpdateTeamMemberRolesArgs struct { + A string + B string + C string +} + +type Z_UpdateTeamMemberRolesReturns struct { + A *model.TeamMember + B *model.AppError +} + +func (g *apiRPCClient) UpdateTeamMemberRoles(teamID, userID, newRoles string) (*model.TeamMember, *model.AppError) { + _args := &Z_UpdateTeamMemberRolesArgs{teamID, userID, newRoles} + _returns := &Z_UpdateTeamMemberRolesReturns{} + if err := g.client.Call("Plugin.UpdateTeamMemberRoles", _args, _returns); err != nil { + log.Printf("RPC call to UpdateTeamMemberRoles API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateTeamMemberRoles(args *Z_UpdateTeamMemberRolesArgs, returns *Z_UpdateTeamMemberRolesReturns) error { + if hook, ok := s.impl.(interface { + UpdateTeamMemberRoles(teamID, userID, newRoles string) (*model.TeamMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateTeamMemberRoles(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API UpdateTeamMemberRoles called but not implemented.")) + } + return nil +} + +type Z_CreateChannelArgs struct { + A *model.Channel +} + +type Z_CreateChannelReturns struct { + A *model.Channel + B *model.AppError +} + +func (g *apiRPCClient) CreateChannel(channel *model.Channel) (*model.Channel, *model.AppError) { + _args := &Z_CreateChannelArgs{channel} + _returns := &Z_CreateChannelReturns{} + if err := g.client.Call("Plugin.CreateChannel", _args, _returns); err != nil { + log.Printf("RPC call to CreateChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateChannel(args *Z_CreateChannelArgs, returns *Z_CreateChannelReturns) error { + if hook, ok := s.impl.(interface { + 
CreateChannel(channel *model.Channel) (*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateChannel(args.A) + } else { + return encodableError(fmt.Errorf("API CreateChannel called but not implemented.")) + } + return nil +} + +type Z_DeleteChannelArgs struct { + A string +} + +type Z_DeleteChannelReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeleteChannel(channelId string) *model.AppError { + _args := &Z_DeleteChannelArgs{channelId} + _returns := &Z_DeleteChannelReturns{} + if err := g.client.Call("Plugin.DeleteChannel", _args, _returns); err != nil { + log.Printf("RPC call to DeleteChannel API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeleteChannel(args *Z_DeleteChannelArgs, returns *Z_DeleteChannelReturns) error { + if hook, ok := s.impl.(interface { + DeleteChannel(channelId string) *model.AppError + }); ok { + returns.A = hook.DeleteChannel(args.A) + } else { + return encodableError(fmt.Errorf("API DeleteChannel called but not implemented.")) + } + return nil +} + +type Z_GetPublicChannelsForTeamArgs struct { + A string + B int + C int +} + +type Z_GetPublicChannelsForTeamReturns struct { + A []*model.Channel + B *model.AppError +} + +func (g *apiRPCClient) GetPublicChannelsForTeam(teamID string, page, perPage int) ([]*model.Channel, *model.AppError) { + _args := &Z_GetPublicChannelsForTeamArgs{teamID, page, perPage} + _returns := &Z_GetPublicChannelsForTeamReturns{} + if err := g.client.Call("Plugin.GetPublicChannelsForTeam", _args, _returns); err != nil { + log.Printf("RPC call to GetPublicChannelsForTeam API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPublicChannelsForTeam(args *Z_GetPublicChannelsForTeamArgs, returns *Z_GetPublicChannelsForTeamReturns) error { + if hook, ok := s.impl.(interface { + GetPublicChannelsForTeam(teamID string, page, perPage int) ([]*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B 
= hook.GetPublicChannelsForTeam(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetPublicChannelsForTeam called but not implemented.")) + } + return nil +} + +type Z_GetChannelArgs struct { + A string +} + +type Z_GetChannelReturns struct { + A *model.Channel + B *model.AppError +} + +func (g *apiRPCClient) GetChannel(channelId string) (*model.Channel, *model.AppError) { + _args := &Z_GetChannelArgs{channelId} + _returns := &Z_GetChannelReturns{} + if err := g.client.Call("Plugin.GetChannel", _args, _returns); err != nil { + log.Printf("RPC call to GetChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannel(args *Z_GetChannelArgs, returns *Z_GetChannelReturns) error { + if hook, ok := s.impl.(interface { + GetChannel(channelId string) (*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannel(args.A) + } else { + return encodableError(fmt.Errorf("API GetChannel called but not implemented.")) + } + return nil +} + +type Z_GetChannelByNameArgs struct { + A string + B string + C bool +} + +type Z_GetChannelByNameReturns struct { + A *model.Channel + B *model.AppError +} + +func (g *apiRPCClient) GetChannelByName(teamID, name string, includeDeleted bool) (*model.Channel, *model.AppError) { + _args := &Z_GetChannelByNameArgs{teamID, name, includeDeleted} + _returns := &Z_GetChannelByNameReturns{} + if err := g.client.Call("Plugin.GetChannelByName", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelByName API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelByName(args *Z_GetChannelByNameArgs, returns *Z_GetChannelByNameReturns) error { + if hook, ok := s.impl.(interface { + GetChannelByName(teamID, name string, includeDeleted bool) (*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelByName(args.A, args.B, args.C) + } else { + return 
encodableError(fmt.Errorf("API GetChannelByName called but not implemented.")) + } + return nil +} + +type Z_GetChannelByNameForTeamNameArgs struct { + A string + B string + C bool +} + +type Z_GetChannelByNameForTeamNameReturns struct { + A *model.Channel + B *model.AppError +} + +func (g *apiRPCClient) GetChannelByNameForTeamName(teamName, channelName string, includeDeleted bool) (*model.Channel, *model.AppError) { + _args := &Z_GetChannelByNameForTeamNameArgs{teamName, channelName, includeDeleted} + _returns := &Z_GetChannelByNameForTeamNameReturns{} + if err := g.client.Call("Plugin.GetChannelByNameForTeamName", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelByNameForTeamName API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelByNameForTeamName(args *Z_GetChannelByNameForTeamNameArgs, returns *Z_GetChannelByNameForTeamNameReturns) error { + if hook, ok := s.impl.(interface { + GetChannelByNameForTeamName(teamName, channelName string, includeDeleted bool) (*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelByNameForTeamName(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetChannelByNameForTeamName called but not implemented.")) + } + return nil +} + +type Z_GetChannelsForTeamForUserArgs struct { + A string + B string + C bool +} + +type Z_GetChannelsForTeamForUserReturns struct { + A []*model.Channel + B *model.AppError +} + +func (g *apiRPCClient) GetChannelsForTeamForUser(teamID, userID string, includeDeleted bool) ([]*model.Channel, *model.AppError) { + _args := &Z_GetChannelsForTeamForUserArgs{teamID, userID, includeDeleted} + _returns := &Z_GetChannelsForTeamForUserReturns{} + if err := g.client.Call("Plugin.GetChannelsForTeamForUser", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelsForTeamForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) 
GetChannelsForTeamForUser(args *Z_GetChannelsForTeamForUserArgs, returns *Z_GetChannelsForTeamForUserReturns) error { + if hook, ok := s.impl.(interface { + GetChannelsForTeamForUser(teamID, userID string, includeDeleted bool) ([]*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelsForTeamForUser(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetChannelsForTeamForUser called but not implemented.")) + } + return nil +} + +type Z_GetChannelStatsArgs struct { + A string +} + +type Z_GetChannelStatsReturns struct { + A *model.ChannelStats + B *model.AppError +} + +func (g *apiRPCClient) GetChannelStats(channelId string) (*model.ChannelStats, *model.AppError) { + _args := &Z_GetChannelStatsArgs{channelId} + _returns := &Z_GetChannelStatsReturns{} + if err := g.client.Call("Plugin.GetChannelStats", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelStats API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelStats(args *Z_GetChannelStatsArgs, returns *Z_GetChannelStatsReturns) error { + if hook, ok := s.impl.(interface { + GetChannelStats(channelId string) (*model.ChannelStats, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelStats(args.A) + } else { + return encodableError(fmt.Errorf("API GetChannelStats called but not implemented.")) + } + return nil +} + +type Z_GetDirectChannelArgs struct { + A string + B string +} + +type Z_GetDirectChannelReturns struct { + A *model.Channel + B *model.AppError +} + +func (g *apiRPCClient) GetDirectChannel(userId1, userId2 string) (*model.Channel, *model.AppError) { + _args := &Z_GetDirectChannelArgs{userId1, userId2} + _returns := &Z_GetDirectChannelReturns{} + if err := g.client.Call("Plugin.GetDirectChannel", _args, _returns); err != nil { + log.Printf("RPC call to GetDirectChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) 
GetDirectChannel(args *Z_GetDirectChannelArgs, returns *Z_GetDirectChannelReturns) error { + if hook, ok := s.impl.(interface { + GetDirectChannel(userId1, userId2 string) (*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetDirectChannel(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetDirectChannel called but not implemented.")) + } + return nil +} + +type Z_GetGroupChannelArgs struct { + A []string +} + +type Z_GetGroupChannelReturns struct { + A *model.Channel + B *model.AppError +} + +func (g *apiRPCClient) GetGroupChannel(userIds []string) (*model.Channel, *model.AppError) { + _args := &Z_GetGroupChannelArgs{userIds} + _returns := &Z_GetGroupChannelReturns{} + if err := g.client.Call("Plugin.GetGroupChannel", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupChannel(args *Z_GetGroupChannelArgs, returns *Z_GetGroupChannelReturns) error { + if hook, ok := s.impl.(interface { + GetGroupChannel(userIds []string) (*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroupChannel(args.A) + } else { + return encodableError(fmt.Errorf("API GetGroupChannel called but not implemented.")) + } + return nil +} + +type Z_UpdateChannelArgs struct { + A *model.Channel +} + +type Z_UpdateChannelReturns struct { + A *model.Channel + B *model.AppError +} + +func (g *apiRPCClient) UpdateChannel(channel *model.Channel) (*model.Channel, *model.AppError) { + _args := &Z_UpdateChannelArgs{channel} + _returns := &Z_UpdateChannelReturns{} + if err := g.client.Call("Plugin.UpdateChannel", _args, _returns); err != nil { + log.Printf("RPC call to UpdateChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateChannel(args *Z_UpdateChannelArgs, returns *Z_UpdateChannelReturns) error { + if hook, ok := s.impl.(interface { + UpdateChannel(channel 
*model.Channel) (*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateChannel(args.A) + } else { + return encodableError(fmt.Errorf("API UpdateChannel called but not implemented.")) + } + return nil +} + +type Z_SearchChannelsArgs struct { + A string + B string +} + +type Z_SearchChannelsReturns struct { + A []*model.Channel + B *model.AppError +} + +func (g *apiRPCClient) SearchChannels(teamID string, term string) ([]*model.Channel, *model.AppError) { + _args := &Z_SearchChannelsArgs{teamID, term} + _returns := &Z_SearchChannelsReturns{} + if err := g.client.Call("Plugin.SearchChannels", _args, _returns); err != nil { + log.Printf("RPC call to SearchChannels API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SearchChannels(args *Z_SearchChannelsArgs, returns *Z_SearchChannelsReturns) error { + if hook, ok := s.impl.(interface { + SearchChannels(teamID string, term string) ([]*model.Channel, *model.AppError) + }); ok { + returns.A, returns.B = hook.SearchChannels(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API SearchChannels called but not implemented.")) + } + return nil +} + +type Z_CreateChannelSidebarCategoryArgs struct { + A string + B string + C *model.SidebarCategoryWithChannels +} + +type Z_CreateChannelSidebarCategoryReturns struct { + A *model.SidebarCategoryWithChannels + B *model.AppError +} + +func (g *apiRPCClient) CreateChannelSidebarCategory(userID, teamID string, newCategory *model.SidebarCategoryWithChannels) (*model.SidebarCategoryWithChannels, *model.AppError) { + _args := &Z_CreateChannelSidebarCategoryArgs{userID, teamID, newCategory} + _returns := &Z_CreateChannelSidebarCategoryReturns{} + if err := g.client.Call("Plugin.CreateChannelSidebarCategory", _args, _returns); err != nil { + log.Printf("RPC call to CreateChannelSidebarCategory API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) 
CreateChannelSidebarCategory(args *Z_CreateChannelSidebarCategoryArgs, returns *Z_CreateChannelSidebarCategoryReturns) error { + if hook, ok := s.impl.(interface { + CreateChannelSidebarCategory(userID, teamID string, newCategory *model.SidebarCategoryWithChannels) (*model.SidebarCategoryWithChannels, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateChannelSidebarCategory(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API CreateChannelSidebarCategory called but not implemented.")) + } + return nil +} + +type Z_GetChannelSidebarCategoriesArgs struct { + A string + B string +} + +type Z_GetChannelSidebarCategoriesReturns struct { + A *model.OrderedSidebarCategories + B *model.AppError +} + +func (g *apiRPCClient) GetChannelSidebarCategories(userID, teamID string) (*model.OrderedSidebarCategories, *model.AppError) { + _args := &Z_GetChannelSidebarCategoriesArgs{userID, teamID} + _returns := &Z_GetChannelSidebarCategoriesReturns{} + if err := g.client.Call("Plugin.GetChannelSidebarCategories", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelSidebarCategories API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelSidebarCategories(args *Z_GetChannelSidebarCategoriesArgs, returns *Z_GetChannelSidebarCategoriesReturns) error { + if hook, ok := s.impl.(interface { + GetChannelSidebarCategories(userID, teamID string) (*model.OrderedSidebarCategories, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelSidebarCategories(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetChannelSidebarCategories called but not implemented.")) + } + return nil +} + +type Z_UpdateChannelSidebarCategoriesArgs struct { + A string + B string + C []*model.SidebarCategoryWithChannels +} + +type Z_UpdateChannelSidebarCategoriesReturns struct { + A []*model.SidebarCategoryWithChannels + B *model.AppError +} + +func (g *apiRPCClient) 
UpdateChannelSidebarCategories(userID, teamID string, categories []*model.SidebarCategoryWithChannels) ([]*model.SidebarCategoryWithChannels, *model.AppError) { + _args := &Z_UpdateChannelSidebarCategoriesArgs{userID, teamID, categories} + _returns := &Z_UpdateChannelSidebarCategoriesReturns{} + if err := g.client.Call("Plugin.UpdateChannelSidebarCategories", _args, _returns); err != nil { + log.Printf("RPC call to UpdateChannelSidebarCategories API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateChannelSidebarCategories(args *Z_UpdateChannelSidebarCategoriesArgs, returns *Z_UpdateChannelSidebarCategoriesReturns) error { + if hook, ok := s.impl.(interface { + UpdateChannelSidebarCategories(userID, teamID string, categories []*model.SidebarCategoryWithChannels) ([]*model.SidebarCategoryWithChannels, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateChannelSidebarCategories(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API UpdateChannelSidebarCategories called but not implemented.")) + } + return nil +} + +type Z_SearchUsersArgs struct { + A *model.UserSearch +} + +type Z_SearchUsersReturns struct { + A []*model.User + B *model.AppError +} + +func (g *apiRPCClient) SearchUsers(search *model.UserSearch) ([]*model.User, *model.AppError) { + _args := &Z_SearchUsersArgs{search} + _returns := &Z_SearchUsersReturns{} + if err := g.client.Call("Plugin.SearchUsers", _args, _returns); err != nil { + log.Printf("RPC call to SearchUsers API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SearchUsers(args *Z_SearchUsersArgs, returns *Z_SearchUsersReturns) error { + if hook, ok := s.impl.(interface { + SearchUsers(search *model.UserSearch) ([]*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.SearchUsers(args.A) + } else { + return encodableError(fmt.Errorf("API SearchUsers called but not implemented.")) + } + return nil +} + +type 
Z_SearchPostsInTeamArgs struct { + A string + B []*model.SearchParams +} + +type Z_SearchPostsInTeamReturns struct { + A []*model.Post + B *model.AppError +} + +func (g *apiRPCClient) SearchPostsInTeam(teamID string, paramsList []*model.SearchParams) ([]*model.Post, *model.AppError) { + _args := &Z_SearchPostsInTeamArgs{teamID, paramsList} + _returns := &Z_SearchPostsInTeamReturns{} + if err := g.client.Call("Plugin.SearchPostsInTeam", _args, _returns); err != nil { + log.Printf("RPC call to SearchPostsInTeam API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SearchPostsInTeam(args *Z_SearchPostsInTeamArgs, returns *Z_SearchPostsInTeamReturns) error { + if hook, ok := s.impl.(interface { + SearchPostsInTeam(teamID string, paramsList []*model.SearchParams) ([]*model.Post, *model.AppError) + }); ok { + returns.A, returns.B = hook.SearchPostsInTeam(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API SearchPostsInTeam called but not implemented.")) + } + return nil +} + +type Z_SearchPostsInTeamForUserArgs struct { + A string + B string + C model.SearchParameter +} + +type Z_SearchPostsInTeamForUserReturns struct { + A *model.PostSearchResults + B *model.AppError +} + +func (g *apiRPCClient) SearchPostsInTeamForUser(teamID string, userID string, searchParams model.SearchParameter) (*model.PostSearchResults, *model.AppError) { + _args := &Z_SearchPostsInTeamForUserArgs{teamID, userID, searchParams} + _returns := &Z_SearchPostsInTeamForUserReturns{} + if err := g.client.Call("Plugin.SearchPostsInTeamForUser", _args, _returns); err != nil { + log.Printf("RPC call to SearchPostsInTeamForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SearchPostsInTeamForUser(args *Z_SearchPostsInTeamForUserArgs, returns *Z_SearchPostsInTeamForUserReturns) error { + if hook, ok := s.impl.(interface { + SearchPostsInTeamForUser(teamID string, userID string, searchParams 
model.SearchParameter) (*model.PostSearchResults, *model.AppError) + }); ok { + returns.A, returns.B = hook.SearchPostsInTeamForUser(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API SearchPostsInTeamForUser called but not implemented.")) + } + return nil +} + +type Z_AddChannelMemberArgs struct { + A string + B string +} + +type Z_AddChannelMemberReturns struct { + A *model.ChannelMember + B *model.AppError +} + +func (g *apiRPCClient) AddChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) { + _args := &Z_AddChannelMemberArgs{channelId, userID} + _returns := &Z_AddChannelMemberReturns{} + if err := g.client.Call("Plugin.AddChannelMember", _args, _returns); err != nil { + log.Printf("RPC call to AddChannelMember API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) AddChannelMember(args *Z_AddChannelMemberArgs, returns *Z_AddChannelMemberReturns) error { + if hook, ok := s.impl.(interface { + AddChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.AddChannelMember(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API AddChannelMember called but not implemented.")) + } + return nil +} + +type Z_AddUserToChannelArgs struct { + A string + B string + C string +} + +type Z_AddUserToChannelReturns struct { + A *model.ChannelMember + B *model.AppError +} + +func (g *apiRPCClient) AddUserToChannel(channelId, userID, asUserId string) (*model.ChannelMember, *model.AppError) { + _args := &Z_AddUserToChannelArgs{channelId, userID, asUserId} + _returns := &Z_AddUserToChannelReturns{} + if err := g.client.Call("Plugin.AddUserToChannel", _args, _returns); err != nil { + log.Printf("RPC call to AddUserToChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) AddUserToChannel(args *Z_AddUserToChannelArgs, returns *Z_AddUserToChannelReturns) error { + if hook, ok 
:= s.impl.(interface { + AddUserToChannel(channelId, userID, asUserId string) (*model.ChannelMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.AddUserToChannel(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API AddUserToChannel called but not implemented.")) + } + return nil +} + +type Z_GetChannelMemberArgs struct { + A string + B string +} + +type Z_GetChannelMemberReturns struct { + A *model.ChannelMember + B *model.AppError +} + +func (g *apiRPCClient) GetChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) { + _args := &Z_GetChannelMemberArgs{channelId, userID} + _returns := &Z_GetChannelMemberReturns{} + if err := g.client.Call("Plugin.GetChannelMember", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelMember API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelMember(args *Z_GetChannelMemberArgs, returns *Z_GetChannelMemberReturns) error { + if hook, ok := s.impl.(interface { + GetChannelMember(channelId, userID string) (*model.ChannelMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelMember(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetChannelMember called but not implemented.")) + } + return nil +} + +type Z_GetChannelMembersArgs struct { + A string + B int + C int +} + +type Z_GetChannelMembersReturns struct { + A model.ChannelMembers + B *model.AppError +} + +func (g *apiRPCClient) GetChannelMembers(channelId string, page, perPage int) (model.ChannelMembers, *model.AppError) { + _args := &Z_GetChannelMembersArgs{channelId, page, perPage} + _returns := &Z_GetChannelMembersReturns{} + if err := g.client.Call("Plugin.GetChannelMembers", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelMembers API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelMembers(args *Z_GetChannelMembersArgs, returns 
*Z_GetChannelMembersReturns) error { + if hook, ok := s.impl.(interface { + GetChannelMembers(channelId string, page, perPage int) (model.ChannelMembers, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelMembers(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetChannelMembers called but not implemented.")) + } + return nil +} + +type Z_GetChannelMembersByIdsArgs struct { + A string + B []string +} + +type Z_GetChannelMembersByIdsReturns struct { + A model.ChannelMembers + B *model.AppError +} + +func (g *apiRPCClient) GetChannelMembersByIds(channelId string, userIds []string) (model.ChannelMembers, *model.AppError) { + _args := &Z_GetChannelMembersByIdsArgs{channelId, userIds} + _returns := &Z_GetChannelMembersByIdsReturns{} + if err := g.client.Call("Plugin.GetChannelMembersByIds", _args, _returns); err != nil { + log.Printf("RPC call to GetChannelMembersByIds API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelMembersByIds(args *Z_GetChannelMembersByIdsArgs, returns *Z_GetChannelMembersByIdsReturns) error { + if hook, ok := s.impl.(interface { + GetChannelMembersByIds(channelId string, userIds []string) (model.ChannelMembers, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelMembersByIds(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetChannelMembersByIds called but not implemented.")) + } + return nil +} + +type Z_GetChannelMembersForUserArgs struct { + A string + B string + C int + D int +} + +type Z_GetChannelMembersForUserReturns struct { + A []*model.ChannelMember + B *model.AppError +} + +func (g *apiRPCClient) GetChannelMembersForUser(teamID, userID string, page, perPage int) ([]*model.ChannelMember, *model.AppError) { + _args := &Z_GetChannelMembersForUserArgs{teamID, userID, page, perPage} + _returns := &Z_GetChannelMembersForUserReturns{} + if err := g.client.Call("Plugin.GetChannelMembersForUser", _args, _returns); 
err != nil { + log.Printf("RPC call to GetChannelMembersForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetChannelMembersForUser(args *Z_GetChannelMembersForUserArgs, returns *Z_GetChannelMembersForUserReturns) error { + if hook, ok := s.impl.(interface { + GetChannelMembersForUser(teamID, userID string, page, perPage int) ([]*model.ChannelMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetChannelMembersForUser(args.A, args.B, args.C, args.D) + } else { + return encodableError(fmt.Errorf("API GetChannelMembersForUser called but not implemented.")) + } + return nil +} + +type Z_UpdateChannelMemberRolesArgs struct { + A string + B string + C string +} + +type Z_UpdateChannelMemberRolesReturns struct { + A *model.ChannelMember + B *model.AppError +} + +func (g *apiRPCClient) UpdateChannelMemberRoles(channelId, userID, newRoles string) (*model.ChannelMember, *model.AppError) { + _args := &Z_UpdateChannelMemberRolesArgs{channelId, userID, newRoles} + _returns := &Z_UpdateChannelMemberRolesReturns{} + if err := g.client.Call("Plugin.UpdateChannelMemberRoles", _args, _returns); err != nil { + log.Printf("RPC call to UpdateChannelMemberRoles API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateChannelMemberRoles(args *Z_UpdateChannelMemberRolesArgs, returns *Z_UpdateChannelMemberRolesReturns) error { + if hook, ok := s.impl.(interface { + UpdateChannelMemberRoles(channelId, userID, newRoles string) (*model.ChannelMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateChannelMemberRoles(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API UpdateChannelMemberRoles called but not implemented.")) + } + return nil +} + +type Z_UpdateChannelMemberNotificationsArgs struct { + A string + B string + C map[string]string +} + +type Z_UpdateChannelMemberNotificationsReturns struct { + A *model.ChannelMember + B *model.AppError 
+} + +func (g *apiRPCClient) UpdateChannelMemberNotifications(channelId, userID string, notifications map[string]string) (*model.ChannelMember, *model.AppError) { + _args := &Z_UpdateChannelMemberNotificationsArgs{channelId, userID, notifications} + _returns := &Z_UpdateChannelMemberNotificationsReturns{} + if err := g.client.Call("Plugin.UpdateChannelMemberNotifications", _args, _returns); err != nil { + log.Printf("RPC call to UpdateChannelMemberNotifications API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateChannelMemberNotifications(args *Z_UpdateChannelMemberNotificationsArgs, returns *Z_UpdateChannelMemberNotificationsReturns) error { + if hook, ok := s.impl.(interface { + UpdateChannelMemberNotifications(channelId, userID string, notifications map[string]string) (*model.ChannelMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateChannelMemberNotifications(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API UpdateChannelMemberNotifications called but not implemented.")) + } + return nil +} + +type Z_PatchChannelMembersNotificationsArgs struct { + A []*model.ChannelMemberIdentifier + B map[string]string +} + +type Z_PatchChannelMembersNotificationsReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) PatchChannelMembersNotifications(members []*model.ChannelMemberIdentifier, notifyProps map[string]string) *model.AppError { + _args := &Z_PatchChannelMembersNotificationsArgs{members, notifyProps} + _returns := &Z_PatchChannelMembersNotificationsReturns{} + if err := g.client.Call("Plugin.PatchChannelMembersNotifications", _args, _returns); err != nil { + log.Printf("RPC call to PatchChannelMembersNotifications API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) PatchChannelMembersNotifications(args *Z_PatchChannelMembersNotificationsArgs, returns *Z_PatchChannelMembersNotificationsReturns) error { + if hook, ok := 
s.impl.(interface { + PatchChannelMembersNotifications(members []*model.ChannelMemberIdentifier, notifyProps map[string]string) *model.AppError + }); ok { + returns.A = hook.PatchChannelMembersNotifications(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API PatchChannelMembersNotifications called but not implemented.")) + } + return nil +} + +type Z_GetGroupArgs struct { + A string +} + +type Z_GetGroupReturns struct { + A *model.Group + B *model.AppError +} + +func (g *apiRPCClient) GetGroup(groupId string) (*model.Group, *model.AppError) { + _args := &Z_GetGroupArgs{groupId} + _returns := &Z_GetGroupReturns{} + if err := g.client.Call("Plugin.GetGroup", _args, _returns); err != nil { + log.Printf("RPC call to GetGroup API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroup(args *Z_GetGroupArgs, returns *Z_GetGroupReturns) error { + if hook, ok := s.impl.(interface { + GetGroup(groupId string) (*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroup(args.A) + } else { + return encodableError(fmt.Errorf("API GetGroup called but not implemented.")) + } + return nil +} + +type Z_GetGroupByNameArgs struct { + A string +} + +type Z_GetGroupByNameReturns struct { + A *model.Group + B *model.AppError +} + +func (g *apiRPCClient) GetGroupByName(name string) (*model.Group, *model.AppError) { + _args := &Z_GetGroupByNameArgs{name} + _returns := &Z_GetGroupByNameReturns{} + if err := g.client.Call("Plugin.GetGroupByName", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupByName API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupByName(args *Z_GetGroupByNameArgs, returns *Z_GetGroupByNameReturns) error { + if hook, ok := s.impl.(interface { + GetGroupByName(name string) (*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroupByName(args.A) + } else { + return encodableError(fmt.Errorf("API 
GetGroupByName called but not implemented.")) + } + return nil +} + +type Z_GetGroupMemberUsersArgs struct { + A string + B int + C int +} + +type Z_GetGroupMemberUsersReturns struct { + A []*model.User + B *model.AppError +} + +func (g *apiRPCClient) GetGroupMemberUsers(groupID string, page, perPage int) ([]*model.User, *model.AppError) { + _args := &Z_GetGroupMemberUsersArgs{groupID, page, perPage} + _returns := &Z_GetGroupMemberUsersReturns{} + if err := g.client.Call("Plugin.GetGroupMemberUsers", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupMemberUsers API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupMemberUsers(args *Z_GetGroupMemberUsersArgs, returns *Z_GetGroupMemberUsersReturns) error { + if hook, ok := s.impl.(interface { + GetGroupMemberUsers(groupID string, page, perPage int) ([]*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroupMemberUsers(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetGroupMemberUsers called but not implemented.")) + } + return nil +} + +type Z_GetGroupsBySourceArgs struct { + A model.GroupSource +} + +type Z_GetGroupsBySourceReturns struct { + A []*model.Group + B *model.AppError +} + +func (g *apiRPCClient) GetGroupsBySource(groupSource model.GroupSource) ([]*model.Group, *model.AppError) { + _args := &Z_GetGroupsBySourceArgs{groupSource} + _returns := &Z_GetGroupsBySourceReturns{} + if err := g.client.Call("Plugin.GetGroupsBySource", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupsBySource API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupsBySource(args *Z_GetGroupsBySourceArgs, returns *Z_GetGroupsBySourceReturns) error { + if hook, ok := s.impl.(interface { + GetGroupsBySource(groupSource model.GroupSource) ([]*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroupsBySource(args.A) + } else { + return 
encodableError(fmt.Errorf("API GetGroupsBySource called but not implemented.")) + } + return nil +} + +type Z_GetGroupsForUserArgs struct { + A string +} + +type Z_GetGroupsForUserReturns struct { + A []*model.Group + B *model.AppError +} + +func (g *apiRPCClient) GetGroupsForUser(userID string) ([]*model.Group, *model.AppError) { + _args := &Z_GetGroupsForUserArgs{userID} + _returns := &Z_GetGroupsForUserReturns{} + if err := g.client.Call("Plugin.GetGroupsForUser", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupsForUser API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupsForUser(args *Z_GetGroupsForUserArgs, returns *Z_GetGroupsForUserReturns) error { + if hook, ok := s.impl.(interface { + GetGroupsForUser(userID string) ([]*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroupsForUser(args.A) + } else { + return encodableError(fmt.Errorf("API GetGroupsForUser called but not implemented.")) + } + return nil +} + +type Z_DeleteChannelMemberArgs struct { + A string + B string +} + +type Z_DeleteChannelMemberReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeleteChannelMember(channelId, userID string) *model.AppError { + _args := &Z_DeleteChannelMemberArgs{channelId, userID} + _returns := &Z_DeleteChannelMemberReturns{} + if err := g.client.Call("Plugin.DeleteChannelMember", _args, _returns); err != nil { + log.Printf("RPC call to DeleteChannelMember API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeleteChannelMember(args *Z_DeleteChannelMemberArgs, returns *Z_DeleteChannelMemberReturns) error { + if hook, ok := s.impl.(interface { + DeleteChannelMember(channelId, userID string) *model.AppError + }); ok { + returns.A = hook.DeleteChannelMember(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API DeleteChannelMember called but not implemented.")) + } + return nil +} + +type Z_CreatePostArgs struct { + A 
*model.Post +} + +type Z_CreatePostReturns struct { + A *model.Post + B *model.AppError +} + +func (g *apiRPCClient) CreatePost(post *model.Post) (*model.Post, *model.AppError) { + _args := &Z_CreatePostArgs{post} + _returns := &Z_CreatePostReturns{} + if err := g.client.Call("Plugin.CreatePost", _args, _returns); err != nil { + log.Printf("RPC call to CreatePost API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreatePost(args *Z_CreatePostArgs, returns *Z_CreatePostReturns) error { + if hook, ok := s.impl.(interface { + CreatePost(post *model.Post) (*model.Post, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreatePost(args.A) + } else { + return encodableError(fmt.Errorf("API CreatePost called but not implemented.")) + } + return nil +} + +type Z_AddReactionArgs struct { + A *model.Reaction +} + +type Z_AddReactionReturns struct { + A *model.Reaction + B *model.AppError +} + +func (g *apiRPCClient) AddReaction(reaction *model.Reaction) (*model.Reaction, *model.AppError) { + _args := &Z_AddReactionArgs{reaction} + _returns := &Z_AddReactionReturns{} + if err := g.client.Call("Plugin.AddReaction", _args, _returns); err != nil { + log.Printf("RPC call to AddReaction API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) AddReaction(args *Z_AddReactionArgs, returns *Z_AddReactionReturns) error { + if hook, ok := s.impl.(interface { + AddReaction(reaction *model.Reaction) (*model.Reaction, *model.AppError) + }); ok { + returns.A, returns.B = hook.AddReaction(args.A) + } else { + return encodableError(fmt.Errorf("API AddReaction called but not implemented.")) + } + return nil +} + +type Z_RemoveReactionArgs struct { + A *model.Reaction +} + +type Z_RemoveReactionReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) RemoveReaction(reaction *model.Reaction) *model.AppError { + _args := &Z_RemoveReactionArgs{reaction} + _returns := &Z_RemoveReactionReturns{} + if 
err := g.client.Call("Plugin.RemoveReaction", _args, _returns); err != nil { + log.Printf("RPC call to RemoveReaction API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) RemoveReaction(args *Z_RemoveReactionArgs, returns *Z_RemoveReactionReturns) error { + if hook, ok := s.impl.(interface { + RemoveReaction(reaction *model.Reaction) *model.AppError + }); ok { + returns.A = hook.RemoveReaction(args.A) + } else { + return encodableError(fmt.Errorf("API RemoveReaction called but not implemented.")) + } + return nil +} + +type Z_GetReactionsArgs struct { + A string +} + +type Z_GetReactionsReturns struct { + A []*model.Reaction + B *model.AppError +} + +func (g *apiRPCClient) GetReactions(postId string) ([]*model.Reaction, *model.AppError) { + _args := &Z_GetReactionsArgs{postId} + _returns := &Z_GetReactionsReturns{} + if err := g.client.Call("Plugin.GetReactions", _args, _returns); err != nil { + log.Printf("RPC call to GetReactions API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetReactions(args *Z_GetReactionsArgs, returns *Z_GetReactionsReturns) error { + if hook, ok := s.impl.(interface { + GetReactions(postId string) ([]*model.Reaction, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetReactions(args.A) + } else { + return encodableError(fmt.Errorf("API GetReactions called but not implemented.")) + } + return nil +} + +type Z_SendEphemeralPostArgs struct { + A string + B *model.Post +} + +type Z_SendEphemeralPostReturns struct { + A *model.Post +} + +func (g *apiRPCClient) SendEphemeralPost(userID string, post *model.Post) *model.Post { + _args := &Z_SendEphemeralPostArgs{userID, post} + _returns := &Z_SendEphemeralPostReturns{} + if err := g.client.Call("Plugin.SendEphemeralPost", _args, _returns); err != nil { + log.Printf("RPC call to SendEphemeralPost API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) SendEphemeralPost(args 
*Z_SendEphemeralPostArgs, returns *Z_SendEphemeralPostReturns) error { + if hook, ok := s.impl.(interface { + SendEphemeralPost(userID string, post *model.Post) *model.Post + }); ok { + returns.A = hook.SendEphemeralPost(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API SendEphemeralPost called but not implemented.")) + } + return nil +} + +type Z_UpdateEphemeralPostArgs struct { + A string + B *model.Post +} + +type Z_UpdateEphemeralPostReturns struct { + A *model.Post +} + +func (g *apiRPCClient) UpdateEphemeralPost(userID string, post *model.Post) *model.Post { + _args := &Z_UpdateEphemeralPostArgs{userID, post} + _returns := &Z_UpdateEphemeralPostReturns{} + if err := g.client.Call("Plugin.UpdateEphemeralPost", _args, _returns); err != nil { + log.Printf("RPC call to UpdateEphemeralPost API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) UpdateEphemeralPost(args *Z_UpdateEphemeralPostArgs, returns *Z_UpdateEphemeralPostReturns) error { + if hook, ok := s.impl.(interface { + UpdateEphemeralPost(userID string, post *model.Post) *model.Post + }); ok { + returns.A = hook.UpdateEphemeralPost(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API UpdateEphemeralPost called but not implemented.")) + } + return nil +} + +type Z_DeleteEphemeralPostArgs struct { + A string + B string +} + +type Z_DeleteEphemeralPostReturns struct { +} + +func (g *apiRPCClient) DeleteEphemeralPost(userID, postId string) { + _args := &Z_DeleteEphemeralPostArgs{userID, postId} + _returns := &Z_DeleteEphemeralPostReturns{} + if err := g.client.Call("Plugin.DeleteEphemeralPost", _args, _returns); err != nil { + log.Printf("RPC call to DeleteEphemeralPost API failed: %s", err.Error()) + } + +} + +func (s *apiRPCServer) DeleteEphemeralPost(args *Z_DeleteEphemeralPostArgs, returns *Z_DeleteEphemeralPostReturns) error { + if hook, ok := s.impl.(interface { + DeleteEphemeralPost(userID, postId string) + }); ok { + 
hook.DeleteEphemeralPost(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API DeleteEphemeralPost called but not implemented.")) + } + return nil +} + +type Z_DeletePostArgs struct { + A string +} + +type Z_DeletePostReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeletePost(postId string) *model.AppError { + _args := &Z_DeletePostArgs{postId} + _returns := &Z_DeletePostReturns{} + if err := g.client.Call("Plugin.DeletePost", _args, _returns); err != nil { + log.Printf("RPC call to DeletePost API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeletePost(args *Z_DeletePostArgs, returns *Z_DeletePostReturns) error { + if hook, ok := s.impl.(interface { + DeletePost(postId string) *model.AppError + }); ok { + returns.A = hook.DeletePost(args.A) + } else { + return encodableError(fmt.Errorf("API DeletePost called but not implemented.")) + } + return nil +} + +type Z_GetPostThreadArgs struct { + A string +} + +type Z_GetPostThreadReturns struct { + A *model.PostList + B *model.AppError +} + +func (g *apiRPCClient) GetPostThread(postId string) (*model.PostList, *model.AppError) { + _args := &Z_GetPostThreadArgs{postId} + _returns := &Z_GetPostThreadReturns{} + if err := g.client.Call("Plugin.GetPostThread", _args, _returns); err != nil { + log.Printf("RPC call to GetPostThread API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPostThread(args *Z_GetPostThreadArgs, returns *Z_GetPostThreadReturns) error { + if hook, ok := s.impl.(interface { + GetPostThread(postId string) (*model.PostList, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPostThread(args.A) + } else { + return encodableError(fmt.Errorf("API GetPostThread called but not implemented.")) + } + return nil +} + +type Z_GetPostArgs struct { + A string +} + +type Z_GetPostReturns struct { + A *model.Post + B *model.AppError +} + +func (g *apiRPCClient) GetPost(postId string) (*model.Post, 
*model.AppError) { + _args := &Z_GetPostArgs{postId} + _returns := &Z_GetPostReturns{} + if err := g.client.Call("Plugin.GetPost", _args, _returns); err != nil { + log.Printf("RPC call to GetPost API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPost(args *Z_GetPostArgs, returns *Z_GetPostReturns) error { + if hook, ok := s.impl.(interface { + GetPost(postId string) (*model.Post, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPost(args.A) + } else { + return encodableError(fmt.Errorf("API GetPost called but not implemented.")) + } + return nil +} + +type Z_GetPostsSinceArgs struct { + A string + B int64 +} + +type Z_GetPostsSinceReturns struct { + A *model.PostList + B *model.AppError +} + +func (g *apiRPCClient) GetPostsSince(channelId string, time int64) (*model.PostList, *model.AppError) { + _args := &Z_GetPostsSinceArgs{channelId, time} + _returns := &Z_GetPostsSinceReturns{} + if err := g.client.Call("Plugin.GetPostsSince", _args, _returns); err != nil { + log.Printf("RPC call to GetPostsSince API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPostsSince(args *Z_GetPostsSinceArgs, returns *Z_GetPostsSinceReturns) error { + if hook, ok := s.impl.(interface { + GetPostsSince(channelId string, time int64) (*model.PostList, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPostsSince(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetPostsSince called but not implemented.")) + } + return nil +} + +type Z_GetPostsAfterArgs struct { + A string + B string + C int + D int +} + +type Z_GetPostsAfterReturns struct { + A *model.PostList + B *model.AppError +} + +func (g *apiRPCClient) GetPostsAfter(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) { + _args := &Z_GetPostsAfterArgs{channelId, postId, page, perPage} + _returns := &Z_GetPostsAfterReturns{} + if err := g.client.Call("Plugin.GetPostsAfter", 
_args, _returns); err != nil { + log.Printf("RPC call to GetPostsAfter API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPostsAfter(args *Z_GetPostsAfterArgs, returns *Z_GetPostsAfterReturns) error { + if hook, ok := s.impl.(interface { + GetPostsAfter(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPostsAfter(args.A, args.B, args.C, args.D) + } else { + return encodableError(fmt.Errorf("API GetPostsAfter called but not implemented.")) + } + return nil +} + +type Z_GetPostsBeforeArgs struct { + A string + B string + C int + D int +} + +type Z_GetPostsBeforeReturns struct { + A *model.PostList + B *model.AppError +} + +func (g *apiRPCClient) GetPostsBefore(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) { + _args := &Z_GetPostsBeforeArgs{channelId, postId, page, perPage} + _returns := &Z_GetPostsBeforeReturns{} + if err := g.client.Call("Plugin.GetPostsBefore", _args, _returns); err != nil { + log.Printf("RPC call to GetPostsBefore API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPostsBefore(args *Z_GetPostsBeforeArgs, returns *Z_GetPostsBeforeReturns) error { + if hook, ok := s.impl.(interface { + GetPostsBefore(channelId, postId string, page, perPage int) (*model.PostList, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPostsBefore(args.A, args.B, args.C, args.D) + } else { + return encodableError(fmt.Errorf("API GetPostsBefore called but not implemented.")) + } + return nil +} + +type Z_GetPostsForChannelArgs struct { + A string + B int + C int +} + +type Z_GetPostsForChannelReturns struct { + A *model.PostList + B *model.AppError +} + +func (g *apiRPCClient) GetPostsForChannel(channelId string, page, perPage int) (*model.PostList, *model.AppError) { + _args := &Z_GetPostsForChannelArgs{channelId, page, perPage} + _returns := 
&Z_GetPostsForChannelReturns{} + if err := g.client.Call("Plugin.GetPostsForChannel", _args, _returns); err != nil { + log.Printf("RPC call to GetPostsForChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPostsForChannel(args *Z_GetPostsForChannelArgs, returns *Z_GetPostsForChannelReturns) error { + if hook, ok := s.impl.(interface { + GetPostsForChannel(channelId string, page, perPage int) (*model.PostList, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPostsForChannel(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetPostsForChannel called but not implemented.")) + } + return nil +} + +type Z_GetTeamStatsArgs struct { + A string +} + +type Z_GetTeamStatsReturns struct { + A *model.TeamStats + B *model.AppError +} + +func (g *apiRPCClient) GetTeamStats(teamID string) (*model.TeamStats, *model.AppError) { + _args := &Z_GetTeamStatsArgs{teamID} + _returns := &Z_GetTeamStatsReturns{} + if err := g.client.Call("Plugin.GetTeamStats", _args, _returns); err != nil { + log.Printf("RPC call to GetTeamStats API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetTeamStats(args *Z_GetTeamStatsArgs, returns *Z_GetTeamStatsReturns) error { + if hook, ok := s.impl.(interface { + GetTeamStats(teamID string) (*model.TeamStats, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetTeamStats(args.A) + } else { + return encodableError(fmt.Errorf("API GetTeamStats called but not implemented.")) + } + return nil +} + +type Z_UpdatePostArgs struct { + A *model.Post +} + +type Z_UpdatePostReturns struct { + A *model.Post + B *model.AppError +} + +func (g *apiRPCClient) UpdatePost(post *model.Post) (*model.Post, *model.AppError) { + _args := &Z_UpdatePostArgs{post} + _returns := &Z_UpdatePostReturns{} + if err := g.client.Call("Plugin.UpdatePost", _args, _returns); err != nil { + log.Printf("RPC call to UpdatePost API failed: %s", err.Error()) 
+ } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdatePost(args *Z_UpdatePostArgs, returns *Z_UpdatePostReturns) error { + if hook, ok := s.impl.(interface { + UpdatePost(post *model.Post) (*model.Post, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdatePost(args.A) + } else { + return encodableError(fmt.Errorf("API UpdatePost called but not implemented.")) + } + return nil +} + +type Z_GetProfileImageArgs struct { + A string +} + +type Z_GetProfileImageReturns struct { + A []byte + B *model.AppError +} + +func (g *apiRPCClient) GetProfileImage(userID string) ([]byte, *model.AppError) { + _args := &Z_GetProfileImageArgs{userID} + _returns := &Z_GetProfileImageReturns{} + if err := g.client.Call("Plugin.GetProfileImage", _args, _returns); err != nil { + log.Printf("RPC call to GetProfileImage API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetProfileImage(args *Z_GetProfileImageArgs, returns *Z_GetProfileImageReturns) error { + if hook, ok := s.impl.(interface { + GetProfileImage(userID string) ([]byte, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetProfileImage(args.A) + } else { + return encodableError(fmt.Errorf("API GetProfileImage called but not implemented.")) + } + return nil +} + +type Z_SetProfileImageArgs struct { + A string + B []byte +} + +type Z_SetProfileImageReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) SetProfileImage(userID string, data []byte) *model.AppError { + _args := &Z_SetProfileImageArgs{userID, data} + _returns := &Z_SetProfileImageReturns{} + if err := g.client.Call("Plugin.SetProfileImage", _args, _returns); err != nil { + log.Printf("RPC call to SetProfileImage API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) SetProfileImage(args *Z_SetProfileImageArgs, returns *Z_SetProfileImageReturns) error { + if hook, ok := s.impl.(interface { + SetProfileImage(userID string, data []byte) *model.AppError + 
}); ok { + returns.A = hook.SetProfileImage(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API SetProfileImage called but not implemented.")) + } + return nil +} + +type Z_GetEmojiListArgs struct { + A string + B int + C int +} + +type Z_GetEmojiListReturns struct { + A []*model.Emoji + B *model.AppError +} + +func (g *apiRPCClient) GetEmojiList(sortBy string, page, perPage int) ([]*model.Emoji, *model.AppError) { + _args := &Z_GetEmojiListArgs{sortBy, page, perPage} + _returns := &Z_GetEmojiListReturns{} + if err := g.client.Call("Plugin.GetEmojiList", _args, _returns); err != nil { + log.Printf("RPC call to GetEmojiList API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetEmojiList(args *Z_GetEmojiListArgs, returns *Z_GetEmojiListReturns) error { + if hook, ok := s.impl.(interface { + GetEmojiList(sortBy string, page, perPage int) ([]*model.Emoji, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetEmojiList(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetEmojiList called but not implemented.")) + } + return nil +} + +type Z_GetEmojiByNameArgs struct { + A string +} + +type Z_GetEmojiByNameReturns struct { + A *model.Emoji + B *model.AppError +} + +func (g *apiRPCClient) GetEmojiByName(name string) (*model.Emoji, *model.AppError) { + _args := &Z_GetEmojiByNameArgs{name} + _returns := &Z_GetEmojiByNameReturns{} + if err := g.client.Call("Plugin.GetEmojiByName", _args, _returns); err != nil { + log.Printf("RPC call to GetEmojiByName API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetEmojiByName(args *Z_GetEmojiByNameArgs, returns *Z_GetEmojiByNameReturns) error { + if hook, ok := s.impl.(interface { + GetEmojiByName(name string) (*model.Emoji, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetEmojiByName(args.A) + } else { + return encodableError(fmt.Errorf("API GetEmojiByName called but not implemented.")) + 
} + return nil +} + +type Z_GetEmojiArgs struct { + A string +} + +type Z_GetEmojiReturns struct { + A *model.Emoji + B *model.AppError +} + +func (g *apiRPCClient) GetEmoji(emojiId string) (*model.Emoji, *model.AppError) { + _args := &Z_GetEmojiArgs{emojiId} + _returns := &Z_GetEmojiReturns{} + if err := g.client.Call("Plugin.GetEmoji", _args, _returns); err != nil { + log.Printf("RPC call to GetEmoji API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetEmoji(args *Z_GetEmojiArgs, returns *Z_GetEmojiReturns) error { + if hook, ok := s.impl.(interface { + GetEmoji(emojiId string) (*model.Emoji, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetEmoji(args.A) + } else { + return encodableError(fmt.Errorf("API GetEmoji called but not implemented.")) + } + return nil +} + +type Z_CopyFileInfosArgs struct { + A string + B []string +} + +type Z_CopyFileInfosReturns struct { + A []string + B *model.AppError +} + +func (g *apiRPCClient) CopyFileInfos(userID string, fileIds []string) ([]string, *model.AppError) { + _args := &Z_CopyFileInfosArgs{userID, fileIds} + _returns := &Z_CopyFileInfosReturns{} + if err := g.client.Call("Plugin.CopyFileInfos", _args, _returns); err != nil { + log.Printf("RPC call to CopyFileInfos API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CopyFileInfos(args *Z_CopyFileInfosArgs, returns *Z_CopyFileInfosReturns) error { + if hook, ok := s.impl.(interface { + CopyFileInfos(userID string, fileIds []string) ([]string, *model.AppError) + }); ok { + returns.A, returns.B = hook.CopyFileInfos(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API CopyFileInfos called but not implemented.")) + } + return nil +} + +type Z_GetFileInfoArgs struct { + A string +} + +type Z_GetFileInfoReturns struct { + A *model.FileInfo + B *model.AppError +} + +func (g *apiRPCClient) GetFileInfo(fileId string) (*model.FileInfo, *model.AppError) { + _args := 
&Z_GetFileInfoArgs{fileId} + _returns := &Z_GetFileInfoReturns{} + if err := g.client.Call("Plugin.GetFileInfo", _args, _returns); err != nil { + log.Printf("RPC call to GetFileInfo API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetFileInfo(args *Z_GetFileInfoArgs, returns *Z_GetFileInfoReturns) error { + if hook, ok := s.impl.(interface { + GetFileInfo(fileId string) (*model.FileInfo, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetFileInfo(args.A) + } else { + return encodableError(fmt.Errorf("API GetFileInfo called but not implemented.")) + } + return nil +} + +type Z_SetFileSearchableContentArgs struct { + A string + B string +} + +type Z_SetFileSearchableContentReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) SetFileSearchableContent(fileID string, content string) *model.AppError { + _args := &Z_SetFileSearchableContentArgs{fileID, content} + _returns := &Z_SetFileSearchableContentReturns{} + if err := g.client.Call("Plugin.SetFileSearchableContent", _args, _returns); err != nil { + log.Printf("RPC call to SetFileSearchableContent API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) SetFileSearchableContent(args *Z_SetFileSearchableContentArgs, returns *Z_SetFileSearchableContentReturns) error { + if hook, ok := s.impl.(interface { + SetFileSearchableContent(fileID string, content string) *model.AppError + }); ok { + returns.A = hook.SetFileSearchableContent(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API SetFileSearchableContent called but not implemented.")) + } + return nil +} + +type Z_GetFileInfosArgs struct { + A int + B int + C *model.GetFileInfosOptions +} + +type Z_GetFileInfosReturns struct { + A []*model.FileInfo + B *model.AppError +} + +func (g *apiRPCClient) GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) { + _args := &Z_GetFileInfosArgs{page, perPage, opt} + _returns 
:= &Z_GetFileInfosReturns{} + if err := g.client.Call("Plugin.GetFileInfos", _args, _returns); err != nil { + log.Printf("RPC call to GetFileInfos API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetFileInfos(args *Z_GetFileInfosArgs, returns *Z_GetFileInfosReturns) error { + if hook, ok := s.impl.(interface { + GetFileInfos(page, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetFileInfos(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetFileInfos called but not implemented.")) + } + return nil +} + +type Z_GetFileArgs struct { + A string +} + +type Z_GetFileReturns struct { + A []byte + B *model.AppError +} + +func (g *apiRPCClient) GetFile(fileId string) ([]byte, *model.AppError) { + _args := &Z_GetFileArgs{fileId} + _returns := &Z_GetFileReturns{} + if err := g.client.Call("Plugin.GetFile", _args, _returns); err != nil { + log.Printf("RPC call to GetFile API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetFile(args *Z_GetFileArgs, returns *Z_GetFileReturns) error { + if hook, ok := s.impl.(interface { + GetFile(fileId string) ([]byte, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetFile(args.A) + } else { + return encodableError(fmt.Errorf("API GetFile called but not implemented.")) + } + return nil +} + +type Z_GetFileLinkArgs struct { + A string +} + +type Z_GetFileLinkReturns struct { + A string + B *model.AppError +} + +func (g *apiRPCClient) GetFileLink(fileId string) (string, *model.AppError) { + _args := &Z_GetFileLinkArgs{fileId} + _returns := &Z_GetFileLinkReturns{} + if err := g.client.Call("Plugin.GetFileLink", _args, _returns); err != nil { + log.Printf("RPC call to GetFileLink API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetFileLink(args *Z_GetFileLinkArgs, returns *Z_GetFileLinkReturns) error 
{ + if hook, ok := s.impl.(interface { + GetFileLink(fileId string) (string, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetFileLink(args.A) + } else { + return encodableError(fmt.Errorf("API GetFileLink called but not implemented.")) + } + return nil +} + +type Z_ReadFileArgs struct { + A string +} + +type Z_ReadFileReturns struct { + A []byte + B *model.AppError +} + +func (g *apiRPCClient) ReadFile(path string) ([]byte, *model.AppError) { + _args := &Z_ReadFileArgs{path} + _returns := &Z_ReadFileReturns{} + if err := g.client.Call("Plugin.ReadFile", _args, _returns); err != nil { + log.Printf("RPC call to ReadFile API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) ReadFile(args *Z_ReadFileArgs, returns *Z_ReadFileReturns) error { + if hook, ok := s.impl.(interface { + ReadFile(path string) ([]byte, *model.AppError) + }); ok { + returns.A, returns.B = hook.ReadFile(args.A) + } else { + return encodableError(fmt.Errorf("API ReadFile called but not implemented.")) + } + return nil +} + +type Z_GetEmojiImageArgs struct { + A string +} + +type Z_GetEmojiImageReturns struct { + A []byte + B string + C *model.AppError +} + +func (g *apiRPCClient) GetEmojiImage(emojiId string) ([]byte, string, *model.AppError) { + _args := &Z_GetEmojiImageArgs{emojiId} + _returns := &Z_GetEmojiImageReturns{} + if err := g.client.Call("Plugin.GetEmojiImage", _args, _returns); err != nil { + log.Printf("RPC call to GetEmojiImage API failed: %s", err.Error()) + } + return _returns.A, _returns.B, _returns.C +} + +func (s *apiRPCServer) GetEmojiImage(args *Z_GetEmojiImageArgs, returns *Z_GetEmojiImageReturns) error { + if hook, ok := s.impl.(interface { + GetEmojiImage(emojiId string) ([]byte, string, *model.AppError) + }); ok { + returns.A, returns.B, returns.C = hook.GetEmojiImage(args.A) + } else { + return encodableError(fmt.Errorf("API GetEmojiImage called but not implemented.")) + } + return nil +} + +type Z_UploadFileArgs 
struct { + A []byte + B string + C string +} + +type Z_UploadFileReturns struct { + A *model.FileInfo + B *model.AppError +} + +func (g *apiRPCClient) UploadFile(data []byte, channelId string, filename string) (*model.FileInfo, *model.AppError) { + _args := &Z_UploadFileArgs{data, channelId, filename} + _returns := &Z_UploadFileReturns{} + if err := g.client.Call("Plugin.UploadFile", _args, _returns); err != nil { + log.Printf("RPC call to UploadFile API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UploadFile(args *Z_UploadFileArgs, returns *Z_UploadFileReturns) error { + if hook, ok := s.impl.(interface { + UploadFile(data []byte, channelId string, filename string) (*model.FileInfo, *model.AppError) + }); ok { + returns.A, returns.B = hook.UploadFile(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API UploadFile called but not implemented.")) + } + return nil +} + +type Z_OpenInteractiveDialogArgs struct { + A model.OpenDialogRequest +} + +type Z_OpenInteractiveDialogReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) OpenInteractiveDialog(dialog model.OpenDialogRequest) *model.AppError { + _args := &Z_OpenInteractiveDialogArgs{dialog} + _returns := &Z_OpenInteractiveDialogReturns{} + if err := g.client.Call("Plugin.OpenInteractiveDialog", _args, _returns); err != nil { + log.Printf("RPC call to OpenInteractiveDialog API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) OpenInteractiveDialog(args *Z_OpenInteractiveDialogArgs, returns *Z_OpenInteractiveDialogReturns) error { + if hook, ok := s.impl.(interface { + OpenInteractiveDialog(dialog model.OpenDialogRequest) *model.AppError + }); ok { + returns.A = hook.OpenInteractiveDialog(args.A) + } else { + return encodableError(fmt.Errorf("API OpenInteractiveDialog called but not implemented.")) + } + return nil +} + +type Z_GetPluginsArgs struct { +} + +type Z_GetPluginsReturns struct { + A 
[]*model.Manifest + B *model.AppError +} + +func (g *apiRPCClient) GetPlugins() ([]*model.Manifest, *model.AppError) { + _args := &Z_GetPluginsArgs{} + _returns := &Z_GetPluginsReturns{} + if err := g.client.Call("Plugin.GetPlugins", _args, _returns); err != nil { + log.Printf("RPC call to GetPlugins API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPlugins(args *Z_GetPluginsArgs, returns *Z_GetPluginsReturns) error { + if hook, ok := s.impl.(interface { + GetPlugins() ([]*model.Manifest, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPlugins() + } else { + return encodableError(fmt.Errorf("API GetPlugins called but not implemented.")) + } + return nil +} + +type Z_EnablePluginArgs struct { + A string +} + +type Z_EnablePluginReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) EnablePlugin(id string) *model.AppError { + _args := &Z_EnablePluginArgs{id} + _returns := &Z_EnablePluginReturns{} + if err := g.client.Call("Plugin.EnablePlugin", _args, _returns); err != nil { + log.Printf("RPC call to EnablePlugin API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) EnablePlugin(args *Z_EnablePluginArgs, returns *Z_EnablePluginReturns) error { + if hook, ok := s.impl.(interface { + EnablePlugin(id string) *model.AppError + }); ok { + returns.A = hook.EnablePlugin(args.A) + } else { + return encodableError(fmt.Errorf("API EnablePlugin called but not implemented.")) + } + return nil +} + +type Z_DisablePluginArgs struct { + A string +} + +type Z_DisablePluginReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DisablePlugin(id string) *model.AppError { + _args := &Z_DisablePluginArgs{id} + _returns := &Z_DisablePluginReturns{} + if err := g.client.Call("Plugin.DisablePlugin", _args, _returns); err != nil { + log.Printf("RPC call to DisablePlugin API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DisablePlugin(args 
*Z_DisablePluginArgs, returns *Z_DisablePluginReturns) error { + if hook, ok := s.impl.(interface { + DisablePlugin(id string) *model.AppError + }); ok { + returns.A = hook.DisablePlugin(args.A) + } else { + return encodableError(fmt.Errorf("API DisablePlugin called but not implemented.")) + } + return nil +} + +type Z_RemovePluginArgs struct { + A string +} + +type Z_RemovePluginReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) RemovePlugin(id string) *model.AppError { + _args := &Z_RemovePluginArgs{id} + _returns := &Z_RemovePluginReturns{} + if err := g.client.Call("Plugin.RemovePlugin", _args, _returns); err != nil { + log.Printf("RPC call to RemovePlugin API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) RemovePlugin(args *Z_RemovePluginArgs, returns *Z_RemovePluginReturns) error { + if hook, ok := s.impl.(interface { + RemovePlugin(id string) *model.AppError + }); ok { + returns.A = hook.RemovePlugin(args.A) + } else { + return encodableError(fmt.Errorf("API RemovePlugin called but not implemented.")) + } + return nil +} + +type Z_GetPluginStatusArgs struct { + A string +} + +type Z_GetPluginStatusReturns struct { + A *model.PluginStatus + B *model.AppError +} + +func (g *apiRPCClient) GetPluginStatus(id string) (*model.PluginStatus, *model.AppError) { + _args := &Z_GetPluginStatusArgs{id} + _returns := &Z_GetPluginStatusReturns{} + if err := g.client.Call("Plugin.GetPluginStatus", _args, _returns); err != nil { + log.Printf("RPC call to GetPluginStatus API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPluginStatus(args *Z_GetPluginStatusArgs, returns *Z_GetPluginStatusReturns) error { + if hook, ok := s.impl.(interface { + GetPluginStatus(id string) (*model.PluginStatus, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetPluginStatus(args.A) + } else { + return encodableError(fmt.Errorf("API GetPluginStatus called but not implemented.")) + } + return nil 
+} + +type Z_KVSetArgs struct { + A string + B []byte +} + +type Z_KVSetReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) KVSet(key string, value []byte) *model.AppError { + _args := &Z_KVSetArgs{key, value} + _returns := &Z_KVSetReturns{} + if err := g.client.Call("Plugin.KVSet", _args, _returns); err != nil { + log.Printf("RPC call to KVSet API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) KVSet(args *Z_KVSetArgs, returns *Z_KVSetReturns) error { + if hook, ok := s.impl.(interface { + KVSet(key string, value []byte) *model.AppError + }); ok { + returns.A = hook.KVSet(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API KVSet called but not implemented.")) + } + return nil +} + +type Z_KVCompareAndSetArgs struct { + A string + B []byte + C []byte +} + +type Z_KVCompareAndSetReturns struct { + A bool + B *model.AppError +} + +func (g *apiRPCClient) KVCompareAndSet(key string, oldValue, newValue []byte) (bool, *model.AppError) { + _args := &Z_KVCompareAndSetArgs{key, oldValue, newValue} + _returns := &Z_KVCompareAndSetReturns{} + if err := g.client.Call("Plugin.KVCompareAndSet", _args, _returns); err != nil { + log.Printf("RPC call to KVCompareAndSet API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) KVCompareAndSet(args *Z_KVCompareAndSetArgs, returns *Z_KVCompareAndSetReturns) error { + if hook, ok := s.impl.(interface { + KVCompareAndSet(key string, oldValue, newValue []byte) (bool, *model.AppError) + }); ok { + returns.A, returns.B = hook.KVCompareAndSet(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API KVCompareAndSet called but not implemented.")) + } + return nil +} + +type Z_KVCompareAndDeleteArgs struct { + A string + B []byte +} + +type Z_KVCompareAndDeleteReturns struct { + A bool + B *model.AppError +} + +func (g *apiRPCClient) KVCompareAndDelete(key string, oldValue []byte) (bool, *model.AppError) { + _args := 
&Z_KVCompareAndDeleteArgs{key, oldValue}
	_returns := &Z_KVCompareAndDeleteReturns{}
	if err := g.client.Call("Plugin.KVCompareAndDelete", _args, _returns); err != nil {
		log.Printf("RPC call to KVCompareAndDelete API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

// NOTE(review): this section looks like machine-generated RPC glue (positional
// Z_*Args/Z_*Returns envelopes, apiRPCClient proxies, apiRPCServer dispatchers).
// If the pattern needs changing, change the generator, not this file — TODO confirm.

// KVCompareAndDelete (server side) dispatches to the implementation if it
// provides the method, otherwise returns an encodable "not implemented" error.
func (s *apiRPCServer) KVCompareAndDelete(args *Z_KVCompareAndDeleteArgs, returns *Z_KVCompareAndDeleteReturns) error {
	if hook, ok := s.impl.(interface {
		KVCompareAndDelete(key string, oldValue []byte) (bool, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.KVCompareAndDelete(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API KVCompareAndDelete called but not implemented."))
	}
	return nil
}

// RPC envelopes for KVSetWithOptions; fields are positional (A=key, B=value, C=options).
type Z_KVSetWithOptionsArgs struct {
	A string
	B []byte
	C model.PluginKVSetOptions
}

type Z_KVSetWithOptionsReturns struct {
	A bool
	B *model.AppError
}

// KVSetWithOptions forwards the call over RPC. A transport failure is only
// logged, so the caller then receives the zero values (false, nil).
func (g *apiRPCClient) KVSetWithOptions(key string, value []byte, options model.PluginKVSetOptions) (bool, *model.AppError) {
	_args := &Z_KVSetWithOptionsArgs{key, value, options}
	_returns := &Z_KVSetWithOptionsReturns{}
	if err := g.client.Call("Plugin.KVSetWithOptions", _args, _returns); err != nil {
		log.Printf("RPC call to KVSetWithOptions API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) KVSetWithOptions(args *Z_KVSetWithOptionsArgs, returns *Z_KVSetWithOptionsReturns) error {
	if hook, ok := s.impl.(interface {
		KVSetWithOptions(key string, value []byte, options model.PluginKVSetOptions) (bool, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.KVSetWithOptions(args.A, args.B, args.C)
	} else {
		return encodableError(fmt.Errorf("API KVSetWithOptions called but not implemented."))
	}
	return nil
}

type Z_KVSetWithExpiryArgs struct {
	A string
	B []byte
	C int64
}

type Z_KVSetWithExpiryReturns struct {
	A *model.AppError
}

// KVSetWithExpiry forwards the call over RPC (C carries expireInSeconds);
// transport failures are logged only.
func (g *apiRPCClient) KVSetWithExpiry(key string, value []byte, expireInSeconds int64) *model.AppError {
	_args := &Z_KVSetWithExpiryArgs{key, value, expireInSeconds}
	_returns := &Z_KVSetWithExpiryReturns{}
	if err := g.client.Call("Plugin.KVSetWithExpiry", _args, _returns); err != nil {
		log.Printf("RPC call to KVSetWithExpiry API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) KVSetWithExpiry(args *Z_KVSetWithExpiryArgs, returns *Z_KVSetWithExpiryReturns) error {
	if hook, ok := s.impl.(interface {
		KVSetWithExpiry(key string, value []byte, expireInSeconds int64) *model.AppError
	}); ok {
		returns.A = hook.KVSetWithExpiry(args.A, args.B, args.C)
	} else {
		return encodableError(fmt.Errorf("API KVSetWithExpiry called but not implemented."))
	}
	return nil
}

type Z_KVGetArgs struct {
	A string
}

type Z_KVGetReturns struct {
	A []byte
	B *model.AppError
}

// KVGet forwards the call over RPC; on transport failure the zero values (nil, nil) are returned.
func (g *apiRPCClient) KVGet(key string) ([]byte, *model.AppError) {
	_args := &Z_KVGetArgs{key}
	_returns := &Z_KVGetReturns{}
	if err := g.client.Call("Plugin.KVGet", _args, _returns); err != nil {
		log.Printf("RPC call to KVGet API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) KVGet(args *Z_KVGetArgs, returns *Z_KVGetReturns) error {
	if hook, ok := s.impl.(interface {
		KVGet(key string) ([]byte, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.KVGet(args.A)
	} else {
		return encodableError(fmt.Errorf("API KVGet called but not implemented."))
	}
	return nil
}

type Z_KVDeleteArgs struct {
	A string
}

type Z_KVDeleteReturns struct {
	A *model.AppError
}

// KVDelete forwards the call over RPC; transport failures are logged only.
func (g *apiRPCClient) KVDelete(key string) *model.AppError {
	_args := &Z_KVDeleteArgs{key}
	_returns := &Z_KVDeleteReturns{}
	if err := g.client.Call("Plugin.KVDelete", _args, _returns); err != nil {
		log.Printf("RPC call to KVDelete API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) KVDelete(args *Z_KVDeleteArgs, returns *Z_KVDeleteReturns) error {
	if hook, ok := s.impl.(interface {
		KVDelete(key string) *model.AppError
	}); ok {
		returns.A = hook.KVDelete(args.A)
	} else {
		return encodableError(fmt.Errorf("API KVDelete called but not implemented."))
	}
	return nil
}

type Z_KVDeleteAllArgs struct {
}

type Z_KVDeleteAllReturns struct {
	A *model.AppError
}

// KVDeleteAll forwards the call over RPC; it takes no arguments.
func (g *apiRPCClient) KVDeleteAll() *model.AppError {
	_args := &Z_KVDeleteAllArgs{}
	_returns := &Z_KVDeleteAllReturns{}
	if err := g.client.Call("Plugin.KVDeleteAll", _args, _returns); err != nil {
		log.Printf("RPC call to KVDeleteAll API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) KVDeleteAll(args *Z_KVDeleteAllArgs, returns *Z_KVDeleteAllReturns) error {
	if hook, ok := s.impl.(interface {
		KVDeleteAll() *model.AppError
	}); ok {
		returns.A = hook.KVDeleteAll()
	} else {
		return encodableError(fmt.Errorf("API KVDeleteAll called but not implemented."))
	}
	return nil
}

type Z_KVListArgs struct {
	A int
	B int
}

type Z_KVListReturns struct {
	A []string
	B *model.AppError
}

// KVList forwards a paginated key listing over RPC (A=page, B=perPage).
func (g *apiRPCClient) KVList(page, perPage int) ([]string, *model.AppError) {
	_args := &Z_KVListArgs{page, perPage}
	_returns := &Z_KVListReturns{}
	if err := g.client.Call("Plugin.KVList", _args, _returns); err != nil {
		log.Printf("RPC call to KVList API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) KVList(args *Z_KVListArgs, returns *Z_KVListReturns) error {
	if hook, ok := s.impl.(interface {
		KVList(page, perPage int) ([]string, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.KVList(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API KVList called but not implemented."))
	}
	return nil
}

type Z_PublishWebSocketEventArgs struct {
	A string
	B map[string]any
	C *model.WebsocketBroadcast
}

type Z_PublishWebSocketEventReturns struct {
}

// PublishWebSocketEvent forwards the event over RPC; it has no return value,
// so a transport failure is logged and otherwise silently dropped.
func (g *apiRPCClient) PublishWebSocketEvent(event string, payload map[string]any,
broadcast *model.WebsocketBroadcast) {
	_args := &Z_PublishWebSocketEventArgs{event, payload, broadcast}
	_returns := &Z_PublishWebSocketEventReturns{}
	if err := g.client.Call("Plugin.PublishWebSocketEvent", _args, _returns); err != nil {
		log.Printf("RPC call to PublishWebSocketEvent API failed: %s", err.Error())
	}

}

func (s *apiRPCServer) PublishWebSocketEvent(args *Z_PublishWebSocketEventArgs, returns *Z_PublishWebSocketEventReturns) error {
	if hook, ok := s.impl.(interface {
		PublishWebSocketEvent(event string, payload map[string]any, broadcast *model.WebsocketBroadcast)
	}); ok {
		hook.PublishWebSocketEvent(args.A, args.B, args.C)
	} else {
		return encodableError(fmt.Errorf("API PublishWebSocketEvent called but not implemented."))
	}
	return nil
}

type Z_HasPermissionToArgs struct {
	A string
	B *model.Permission
}

type Z_HasPermissionToReturns struct {
	A bool
}

// HasPermissionTo forwards a system-level permission check over RPC.
// On transport failure the zero value false ("no permission") is returned.
func (g *apiRPCClient) HasPermissionTo(userID string, permission *model.Permission) bool {
	_args := &Z_HasPermissionToArgs{userID, permission}
	_returns := &Z_HasPermissionToReturns{}
	if err := g.client.Call("Plugin.HasPermissionTo", _args, _returns); err != nil {
		log.Printf("RPC call to HasPermissionTo API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) HasPermissionTo(args *Z_HasPermissionToArgs, returns *Z_HasPermissionToReturns) error {
	if hook, ok := s.impl.(interface {
		HasPermissionTo(userID string, permission *model.Permission) bool
	}); ok {
		returns.A = hook.HasPermissionTo(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API HasPermissionTo called but not implemented."))
	}
	return nil
}

type Z_HasPermissionToTeamArgs struct {
	A string
	B string
	C *model.Permission
}

type Z_HasPermissionToTeamReturns struct {
	A bool
}

// HasPermissionToTeam forwards a team-scoped permission check over RPC;
// transport failure yields false.
func (g *apiRPCClient) HasPermissionToTeam(userID, teamID string, permission *model.Permission) bool {
	_args := &Z_HasPermissionToTeamArgs{userID, teamID, permission}
	_returns := &Z_HasPermissionToTeamReturns{}
	if err := g.client.Call("Plugin.HasPermissionToTeam", _args, _returns); err != nil {
		log.Printf("RPC call to HasPermissionToTeam API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) HasPermissionToTeam(args *Z_HasPermissionToTeamArgs, returns *Z_HasPermissionToTeamReturns) error {
	if hook, ok := s.impl.(interface {
		HasPermissionToTeam(userID, teamID string, permission *model.Permission) bool
	}); ok {
		returns.A = hook.HasPermissionToTeam(args.A, args.B, args.C)
	} else {
		return encodableError(fmt.Errorf("API HasPermissionToTeam called but not implemented."))
	}
	return nil
}

type Z_HasPermissionToChannelArgs struct {
	A string
	B string
	C *model.Permission
}

type Z_HasPermissionToChannelReturns struct {
	A bool
}

// HasPermissionToChannel forwards a channel-scoped permission check over RPC;
// transport failure yields false.
func (g *apiRPCClient) HasPermissionToChannel(userID, channelId string, permission *model.Permission) bool {
	_args := &Z_HasPermissionToChannelArgs{userID, channelId, permission}
	_returns := &Z_HasPermissionToChannelReturns{}
	if err := g.client.Call("Plugin.HasPermissionToChannel", _args, _returns); err != nil {
		log.Printf("RPC call to HasPermissionToChannel API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) HasPermissionToChannel(args *Z_HasPermissionToChannelArgs, returns *Z_HasPermissionToChannelReturns) error {
	if hook, ok := s.impl.(interface {
		HasPermissionToChannel(userID, channelId string, permission *model.Permission) bool
	}); ok {
		returns.A = hook.HasPermissionToChannel(args.A, args.B, args.C)
	} else {
		return encodableError(fmt.Errorf("API HasPermissionToChannel called but not implemented."))
	}
	return nil
}

type Z_RolesGrantPermissionArgs struct {
	A []string
	B string
}

type Z_RolesGrantPermissionReturns struct {
	A bool
}

// RolesGrantPermission forwards a role/permission check over RPC;
// transport failure yields false.
func (g *apiRPCClient) RolesGrantPermission(roleNames []string, permissionId string) bool {
	_args := &Z_RolesGrantPermissionArgs{roleNames, permissionId}
	_returns := &Z_RolesGrantPermissionReturns{}
	if err := g.client.Call("Plugin.RolesGrantPermission", _args, _returns); err != nil {
		log.Printf("RPC call to RolesGrantPermission API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) RolesGrantPermission(args *Z_RolesGrantPermissionArgs, returns *Z_RolesGrantPermissionReturns) error {
	if hook, ok := s.impl.(interface {
		RolesGrantPermission(roleNames []string, permissionId string) bool
	}); ok {
		returns.A = hook.RolesGrantPermission(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API RolesGrantPermission called but not implemented."))
	}
	return nil
}

type Z_SendMailArgs struct {
	A string
	B string
	C string
}

type Z_SendMailReturns struct {
	A *model.AppError
}

// SendMail forwards an email-send request over RPC (A=to, B=subject, C=htmlBody).
func (g *apiRPCClient) SendMail(to, subject, htmlBody string) *model.AppError {
	_args := &Z_SendMailArgs{to, subject, htmlBody}
	_returns := &Z_SendMailReturns{}
	if err := g.client.Call("Plugin.SendMail", _args, _returns); err != nil {
		log.Printf("RPC call to SendMail API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) SendMail(args *Z_SendMailArgs, returns *Z_SendMailReturns) error {
	if hook, ok := s.impl.(interface {
		SendMail(to, subject, htmlBody string) *model.AppError
	}); ok {
		returns.A = hook.SendMail(args.A, args.B, args.C)
	} else {
		return encodableError(fmt.Errorf("API SendMail called but not implemented."))
	}
	return nil
}

type Z_CreateBotArgs struct {
	A *model.Bot
}

type Z_CreateBotReturns struct {
	A *model.Bot
	B *model.AppError
}

// CreateBot forwards bot creation over RPC; on transport failure (nil, nil) is returned.
func (g *apiRPCClient) CreateBot(bot *model.Bot) (*model.Bot, *model.AppError) {
	_args := &Z_CreateBotArgs{bot}
	_returns := &Z_CreateBotReturns{}
	if err := g.client.Call("Plugin.CreateBot", _args, _returns); err != nil {
		log.Printf("RPC call to CreateBot API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) CreateBot(args *Z_CreateBotArgs, returns *Z_CreateBotReturns) error {
	if hook, ok := s.impl.(interface {
		CreateBot(bot *model.Bot) (*model.Bot, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.CreateBot(args.A)
	} else {
		return encodableError(fmt.Errorf("API CreateBot called but not implemented."))
	}
	return nil
}

type Z_PatchBotArgs struct {
	A string
	B *model.BotPatch
}

type Z_PatchBotReturns struct {
	A *model.Bot
	B *model.AppError
}

// PatchBot forwards a partial bot update over RPC (A=botUserId, B=patch).
func (g *apiRPCClient) PatchBot(botUserId string, botPatch *model.BotPatch) (*model.Bot, *model.AppError) {
	_args := &Z_PatchBotArgs{botUserId, botPatch}
	_returns := &Z_PatchBotReturns{}
	if err := g.client.Call("Plugin.PatchBot", _args, _returns); err != nil {
		log.Printf("RPC call to PatchBot API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) PatchBot(args *Z_PatchBotArgs, returns *Z_PatchBotReturns) error {
	if hook, ok := s.impl.(interface {
		PatchBot(botUserId string, botPatch *model.BotPatch) (*model.Bot, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.PatchBot(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API PatchBot called but not implemented."))
	}
	return nil
}

type Z_GetBotArgs struct {
	A string
	B bool
}

type Z_GetBotReturns struct {
	A *model.Bot
	B *model.AppError
}

// GetBot forwards a bot lookup over RPC (B toggles inclusion of deleted bots).
func (g *apiRPCClient) GetBot(botUserId string, includeDeleted bool) (*model.Bot, *model.AppError) {
	_args := &Z_GetBotArgs{botUserId, includeDeleted}
	_returns := &Z_GetBotReturns{}
	if err := g.client.Call("Plugin.GetBot", _args, _returns); err != nil {
		log.Printf("RPC call to GetBot API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) GetBot(args *Z_GetBotArgs, returns *Z_GetBotReturns) error {
	if hook, ok := s.impl.(interface {
		GetBot(botUserId string, includeDeleted bool) (*model.Bot, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.GetBot(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API GetBot called but not implemented."))
	}
	return nil
}

type Z_GetBotsArgs struct {
	A *model.BotGetOptions
}

type Z_GetBotsReturns struct {
	A []*model.Bot
	B *model.AppError
}

// GetBots forwards a filtered bot listing over RPC.
func (g *apiRPCClient) GetBots(options *model.BotGetOptions) ([]*model.Bot, *model.AppError) {
	_args := &Z_GetBotsArgs{options}
	_returns := &Z_GetBotsReturns{}
	if err := g.client.Call("Plugin.GetBots", _args, _returns); err != nil {
		log.Printf("RPC call to GetBots API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) GetBots(args *Z_GetBotsArgs, returns *Z_GetBotsReturns) error {
	if hook, ok := s.impl.(interface {
		GetBots(options *model.BotGetOptions) ([]*model.Bot, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.GetBots(args.A)
	} else {
		return encodableError(fmt.Errorf("API GetBots called but not implemented."))
	}
	return nil
}

type Z_UpdateBotActiveArgs struct {
	A string
	B bool
}

type Z_UpdateBotActiveReturns struct {
	A *model.Bot
	B *model.AppError
}

// UpdateBotActive forwards a bot activate/deactivate toggle over RPC.
func (g *apiRPCClient) UpdateBotActive(botUserId string, active bool) (*model.Bot, *model.AppError) {
	_args := &Z_UpdateBotActiveArgs{botUserId, active}
	_returns := &Z_UpdateBotActiveReturns{}
	if err := g.client.Call("Plugin.UpdateBotActive", _args, _returns); err != nil {
		log.Printf("RPC call to UpdateBotActive API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) UpdateBotActive(args *Z_UpdateBotActiveArgs, returns *Z_UpdateBotActiveReturns) error {
	if hook, ok := s.impl.(interface {
		UpdateBotActive(botUserId string, active bool) (*model.Bot, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.UpdateBotActive(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API UpdateBotActive called but not implemented."))
	}
	return nil
}

type Z_PermanentDeleteBotArgs struct {
	A string
}

type Z_PermanentDeleteBotReturns struct {
	A *model.AppError
}

// PermanentDeleteBot forwards an unrecoverable bot deletion over RPC.
func (g *apiRPCClient) PermanentDeleteBot(botUserId string) *model.AppError {
	_args := &Z_PermanentDeleteBotArgs{botUserId}
	_returns := &Z_PermanentDeleteBotReturns{}
	if err := g.client.Call("Plugin.PermanentDeleteBot", _args, _returns); err != nil {
		log.Printf("RPC call to PermanentDeleteBot API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) PermanentDeleteBot(args *Z_PermanentDeleteBotArgs, returns *Z_PermanentDeleteBotReturns) error {
	if hook, ok := s.impl.(interface {
		PermanentDeleteBot(botUserId string) *model.AppError
	}); ok {
		returns.A = hook.PermanentDeleteBot(args.A)
	} else {
		return encodableError(fmt.Errorf("API PermanentDeleteBot called but not implemented."))
	}
	return nil
}

type Z_PublishUserTypingArgs struct {
	A string
	B string
	C string
}

type Z_PublishUserTypingReturns struct {
	A *model.AppError
}

// PublishUserTyping forwards a typing indicator over RPC (A=userID, B=channelId, C=parentId).
func (g *apiRPCClient) PublishUserTyping(userID, channelId, parentId string) *model.AppError {
	_args := &Z_PublishUserTypingArgs{userID, channelId, parentId}
	_returns := &Z_PublishUserTypingReturns{}
	if err := g.client.Call("Plugin.PublishUserTyping", _args, _returns); err != nil {
		log.Printf("RPC call to PublishUserTyping API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) PublishUserTyping(args *Z_PublishUserTypingArgs, returns *Z_PublishUserTypingReturns) error {
	if hook, ok := s.impl.(interface {
		PublishUserTyping(userID, channelId, parentId string) *model.AppError
	}); ok {
		returns.A = hook.PublishUserTyping(args.A, args.B, args.C)
	} else {
		return encodableError(fmt.Errorf("API PublishUserTyping called but not implemented."))
	}
	return nil
}

type Z_CreateCommandArgs struct {
	A *model.Command
}

type Z_CreateCommandReturns struct {
	A *model.Command
	B error
}

// CreateCommand forwards slash-command creation over RPC. Note this family
// returns plain error (re-wrapped with encodableError on the server side),
// unlike the *model.AppError-returning methods above.
func (g *apiRPCClient) CreateCommand(cmd *model.Command) (*model.Command, error) {
	_args := &Z_CreateCommandArgs{cmd}
	_returns := &Z_CreateCommandReturns{}
	if err := g.client.Call("Plugin.CreateCommand", _args, _returns); err != nil {
		log.Printf("RPC call to CreateCommand API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) CreateCommand(args *Z_CreateCommandArgs, returns *Z_CreateCommandReturns) error {
	if hook, ok := s.impl.(interface {
		CreateCommand(cmd *model.Command) (*model.Command, error)
	}); ok {
		returns.A, returns.B = hook.CreateCommand(args.A)
		// Plain errors must be re-wrapped so they survive gob encoding across the RPC boundary.
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API CreateCommand called but not implemented."))
	}
	return nil
}

type Z_ListCommandsArgs struct {
	A string
}

type Z_ListCommandsReturns struct {
	A []*model.Command
	B error
}

// ListCommands forwards a listing of all commands for a team over RPC.
func (g *apiRPCClient) ListCommands(teamID string) ([]*model.Command, error) {
	_args := &Z_ListCommandsArgs{teamID}
	_returns := &Z_ListCommandsReturns{}
	if err := g.client.Call("Plugin.ListCommands", _args, _returns); err != nil {
		log.Printf("RPC call to ListCommands API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) ListCommands(args *Z_ListCommandsArgs, returns *Z_ListCommandsReturns) error {
	if hook, ok := s.impl.(interface {
		ListCommands(teamID string) ([]*model.Command, error)
	}); ok {
		returns.A, returns.B = hook.ListCommands(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API ListCommands called but not implemented."))
	}
	return nil
}

type Z_ListCustomCommandsArgs struct {
	A string
}

type Z_ListCustomCommandsReturns struct {
	A []*model.Command
	B error
}

// ListCustomCommands forwards a listing of a team's custom commands over RPC.
func (g *apiRPCClient) ListCustomCommands(teamID string) ([]*model.Command, error) {
	_args := &Z_ListCustomCommandsArgs{teamID}
	_returns := &Z_ListCustomCommandsReturns{}
	if err := g.client.Call("Plugin.ListCustomCommands", _args, _returns); err != nil {
		log.Printf("RPC call to ListCustomCommands API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) ListCustomCommands(args *Z_ListCustomCommandsArgs, returns *Z_ListCustomCommandsReturns) error {
	if hook, ok := s.impl.(interface {
		ListCustomCommands(teamID string) ([]*model.Command, error)
	}); ok {
		returns.A, returns.B = hook.ListCustomCommands(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API ListCustomCommands called but not implemented."))
	}
	return nil
}

type Z_ListPluginCommandsArgs struct {
	A string
}

type Z_ListPluginCommandsReturns struct {
	A []*model.Command
	B error
}

// ListPluginCommands forwards a listing of plugin-registered commands over RPC.
func (g *apiRPCClient) ListPluginCommands(teamID string) ([]*model.Command, error) {
	_args := &Z_ListPluginCommandsArgs{teamID}
	_returns := &Z_ListPluginCommandsReturns{}
	if err := g.client.Call("Plugin.ListPluginCommands", _args, _returns); err != nil {
		log.Printf("RPC call to ListPluginCommands API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) ListPluginCommands(args *Z_ListPluginCommandsArgs, returns *Z_ListPluginCommandsReturns) error {
	if hook, ok := s.impl.(interface {
		ListPluginCommands(teamID string) ([]*model.Command, error)
	}); ok {
		returns.A, returns.B = hook.ListPluginCommands(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API ListPluginCommands called but not implemented."))
	}
	return nil
}

type Z_ListBuiltInCommandsArgs struct {
}

type Z_ListBuiltInCommandsReturns struct {
	A []*model.Command
	B error
}

// ListBuiltInCommands forwards a listing of built-in commands over RPC (no arguments).
func (g *apiRPCClient) ListBuiltInCommands() ([]*model.Command, error) {
	_args := &Z_ListBuiltInCommandsArgs{}
	_returns := &Z_ListBuiltInCommandsReturns{}
	if err := g.client.Call("Plugin.ListBuiltInCommands", _args, _returns); err != nil {
		log.Printf("RPC call to ListBuiltInCommands API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) ListBuiltInCommands(args *Z_ListBuiltInCommandsArgs, returns *Z_ListBuiltInCommandsReturns) error {
	if hook, ok := s.impl.(interface {
		ListBuiltInCommands() ([]*model.Command, error)
	}); ok {
		returns.A, returns.B = hook.ListBuiltInCommands()
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API ListBuiltInCommands called but not implemented."))
	}
	return nil
}

type Z_GetCommandArgs struct {
	A string
}

type Z_GetCommandReturns struct {
	A *model.Command
	B error
}

// GetCommand forwards a single-command lookup over RPC.
func (g *apiRPCClient) GetCommand(commandID string) (*model.Command, error) {
	_args := &Z_GetCommandArgs{commandID}
	_returns := &Z_GetCommandReturns{}
	if err := g.client.Call("Plugin.GetCommand", _args, _returns); err != nil {
		log.Printf("RPC call to GetCommand API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) GetCommand(args *Z_GetCommandArgs, returns *Z_GetCommandReturns) error {
	if hook, ok := s.impl.(interface {
		GetCommand(commandID string) (*model.Command, error)
	}); ok {
		returns.A, returns.B = hook.GetCommand(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API GetCommand called but not implemented."))
	}
	return nil
}

type Z_UpdateCommandArgs struct {
	A string
	B *model.Command
}

type Z_UpdateCommandReturns struct {
	A *model.Command
	B error
}

// UpdateCommand forwards a command update over RPC (A=commandID, B=updated command).
func (g *apiRPCClient) UpdateCommand(commandID string, updatedCmd *model.Command) (*model.Command, error) {
	_args := &Z_UpdateCommandArgs{commandID, updatedCmd}
	_returns := &Z_UpdateCommandReturns{}
	if err := g.client.Call("Plugin.UpdateCommand", _args, _returns); err != nil {
		log.Printf("RPC call to UpdateCommand API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) UpdateCommand(args *Z_UpdateCommandArgs, returns *Z_UpdateCommandReturns) error {
	if hook, ok := s.impl.(interface {
		UpdateCommand(commandID string, updatedCmd *model.Command) (*model.Command, error)
	}); ok {
		returns.A, returns.B = hook.UpdateCommand(args.A, args.B)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API UpdateCommand called but not implemented."))
	}
	return nil
}

type Z_DeleteCommandArgs struct {
	A string
}

type Z_DeleteCommandReturns struct {
	A error
}

// DeleteCommand forwards a command deletion over RPC.
func (g *apiRPCClient) DeleteCommand(commandID string) error {
	_args := &Z_DeleteCommandArgs{commandID}
	_returns := &Z_DeleteCommandReturns{}
	if err := g.client.Call("Plugin.DeleteCommand", _args, _returns); err != nil {
		log.Printf("RPC call to DeleteCommand API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) DeleteCommand(args *Z_DeleteCommandArgs, returns *Z_DeleteCommandReturns) error {
	if hook, ok := s.impl.(interface {
		DeleteCommand(commandID string) error
	}); ok {
		returns.A = hook.DeleteCommand(args.A)
		// Plain errors must be re-wrapped so they survive gob encoding across the RPC boundary.
		returns.A = encodableError(returns.A)
	} else {
		return encodableError(fmt.Errorf("API DeleteCommand called but not implemented."))
	}
	return nil
}

type Z_CreateOAuthAppArgs struct {
	A *model.OAuthApp
}

type Z_CreateOAuthAppReturns struct {
	A *model.OAuthApp
	B *model.AppError
}

// CreateOAuthApp forwards OAuth app registration over RPC.
func (g *apiRPCClient) CreateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) {
	_args := &Z_CreateOAuthAppArgs{app}
	_returns := &Z_CreateOAuthAppReturns{}
	if err := g.client.Call("Plugin.CreateOAuthApp", _args, _returns); err != nil {
		log.Printf("RPC call to CreateOAuthApp API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) CreateOAuthApp(args *Z_CreateOAuthAppArgs, returns *Z_CreateOAuthAppReturns) error {
	if hook, ok := s.impl.(interface {
		CreateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.CreateOAuthApp(args.A)
	} else {
		return encodableError(fmt.Errorf("API CreateOAuthApp called but not implemented."))
	}
	return nil
}

type Z_GetOAuthAppArgs struct {
	A string
}

type Z_GetOAuthAppReturns struct {
	A *model.OAuthApp
	B *model.AppError
}

// GetOAuthApp forwards an OAuth app lookup over RPC.
func (g *apiRPCClient) GetOAuthApp(appID string) (*model.OAuthApp, *model.AppError) {
	_args := &Z_GetOAuthAppArgs{appID}
	_returns := &Z_GetOAuthAppReturns{}
	if err := g.client.Call("Plugin.GetOAuthApp", _args, _returns); err != nil {
		log.Printf("RPC call to GetOAuthApp API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) GetOAuthApp(args *Z_GetOAuthAppArgs, returns *Z_GetOAuthAppReturns) error {
	if hook, ok := s.impl.(interface {
		GetOAuthApp(appID string) (*model.OAuthApp, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.GetOAuthApp(args.A)
	} else {
		return encodableError(fmt.Errorf("API GetOAuthApp called but not implemented."))
	}
	return nil
}

type Z_UpdateOAuthAppArgs struct {
	A *model.OAuthApp
}

type Z_UpdateOAuthAppReturns struct {
	A *model.OAuthApp
	B *model.AppError
}

// UpdateOAuthApp forwards an OAuth app update over RPC.
func (g *apiRPCClient) UpdateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) {
	_args := &Z_UpdateOAuthAppArgs{app}
	_returns := &Z_UpdateOAuthAppReturns{}
	if err := g.client.Call("Plugin.UpdateOAuthApp", _args, _returns); err != nil {
		log.Printf("RPC call to UpdateOAuthApp API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) UpdateOAuthApp(args *Z_UpdateOAuthAppArgs, returns *Z_UpdateOAuthAppReturns) error {
	if hook, ok := s.impl.(interface {
		UpdateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.UpdateOAuthApp(args.A)
	} else {
		return encodableError(fmt.Errorf("API UpdateOAuthApp called but not implemented."))
	}
	return nil
}

type Z_DeleteOAuthAppArgs struct {
	A string
}

type Z_DeleteOAuthAppReturns struct {
	A *model.AppError
}

// DeleteOAuthApp forwards an OAuth app deletion over RPC.
func (g *apiRPCClient) DeleteOAuthApp(appID string) *model.AppError {
	_args := &Z_DeleteOAuthAppArgs{appID}
	_returns := &Z_DeleteOAuthAppReturns{}
	if err := g.client.Call("Plugin.DeleteOAuthApp", _args, _returns); err != nil {
		log.Printf("RPC call to DeleteOAuthApp API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) DeleteOAuthApp(args *Z_DeleteOAuthAppArgs, returns *Z_DeleteOAuthAppReturns) error {
	if hook, ok := s.impl.(interface {
		DeleteOAuthApp(appID string) *model.AppError
	}); ok {
		returns.A = hook.DeleteOAuthApp(args.A)
	} else {
		return encodableError(fmt.Errorf("API DeleteOAuthApp called but not implemented."))
	}
	return nil
}

type Z_PublishPluginClusterEventArgs struct {
	A model.PluginClusterEvent
	B model.PluginClusterEventSendOptions
}

type Z_PublishPluginClusterEventReturns struct {
	A error
}

// PublishPluginClusterEvent forwards a cluster-wide plugin event over RPC.
func (g *apiRPCClient) PublishPluginClusterEvent(ev model.PluginClusterEvent, opts model.PluginClusterEventSendOptions) error {
	_args := &Z_PublishPluginClusterEventArgs{ev, opts}
	_returns := &Z_PublishPluginClusterEventReturns{}
	if err := g.client.Call("Plugin.PublishPluginClusterEvent", _args, _returns); err != nil {
		log.Printf("RPC call to PublishPluginClusterEvent API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) PublishPluginClusterEvent(args *Z_PublishPluginClusterEventArgs, returns *Z_PublishPluginClusterEventReturns) error {
	if hook, ok := s.impl.(interface {
		PublishPluginClusterEvent(ev model.PluginClusterEvent, opts model.PluginClusterEventSendOptions) error
	}); ok {
		returns.A = hook.PublishPluginClusterEvent(args.A, args.B)
		returns.A = encodableError(returns.A)
	} else {
		return encodableError(fmt.Errorf("API PublishPluginClusterEvent called but not implemented."))
	}
	return nil
}

type Z_RequestTrialLicenseArgs struct {
	A string
	B int
	C bool
	D bool
}

type Z_RequestTrialLicenseReturns struct {
	A *model.AppError
}

// RequestTrialLicense forwards a trial-license request over RPC
// (A=requesterID, B=users, C=termsAccepted, D=receiveEmailsAccepted).
func (g *apiRPCClient) RequestTrialLicense(requesterID string, users int, termsAccepted bool, receiveEmailsAccepted bool) *model.AppError {
	_args := &Z_RequestTrialLicenseArgs{requesterID, users, termsAccepted, receiveEmailsAccepted}
	_returns := &Z_RequestTrialLicenseReturns{}
	if err := g.client.Call("Plugin.RequestTrialLicense", _args, _returns); err != nil {
		log.Printf("RPC call to RequestTrialLicense API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) RequestTrialLicense(args *Z_RequestTrialLicenseArgs, returns *Z_RequestTrialLicenseReturns) error {
	if hook, ok := s.impl.(interface {
		RequestTrialLicense(requesterID string, users int, termsAccepted bool, receiveEmailsAccepted bool) *model.AppError
	}); ok {
		returns.A = hook.RequestTrialLicense(args.A, args.B, args.C, args.D)
	} else {
		return encodableError(fmt.Errorf("API RequestTrialLicense called but not implemented."))
	}
	return nil
}

type Z_GetCloudLimitsArgs struct {
}

type Z_GetCloudLimitsReturns struct {
	A *model.ProductLimits
	B error
}

// GetCloudLimits forwards a cloud product-limits query over RPC (no arguments).
func (g *apiRPCClient) GetCloudLimits() (*model.ProductLimits, error) {
	_args := &Z_GetCloudLimitsArgs{}
	_returns := &Z_GetCloudLimitsReturns{}
	if err := g.client.Call("Plugin.GetCloudLimits", _args, _returns); err != nil {
		log.Printf("RPC call to GetCloudLimits API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) GetCloudLimits(args *Z_GetCloudLimitsArgs, returns *Z_GetCloudLimitsReturns) error {
	if hook, ok := s.impl.(interface {
		GetCloudLimits() (*model.ProductLimits, error)
	}); ok {
		returns.A, returns.B = hook.GetCloudLimits()
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API GetCloudLimits called but not implemented."))
	}
	return nil
}

type Z_EnsureBotUserArgs struct {
	A *model.Bot
}

type Z_EnsureBotUserReturns struct {
	A string
	B error
}

// EnsureBotUser forwards a get-or-create bot request over RPC; returns the bot user ID.
func (g *apiRPCClient) EnsureBotUser(bot *model.Bot) (string, error)
{
	_args := &Z_EnsureBotUserArgs{bot}
	_returns := &Z_EnsureBotUserReturns{}
	if err := g.client.Call("Plugin.EnsureBotUser", _args, _returns); err != nil {
		log.Printf("RPC call to EnsureBotUser API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) EnsureBotUser(args *Z_EnsureBotUserArgs, returns *Z_EnsureBotUserReturns) error {
	if hook, ok := s.impl.(interface {
		EnsureBotUser(bot *model.Bot) (string, error)
	}); ok {
		returns.A, returns.B = hook.EnsureBotUser(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API EnsureBotUser called but not implemented."))
	}
	return nil
}

type Z_RegisterCollectionAndTopicArgs struct {
	A string
	B string
}

type Z_RegisterCollectionAndTopicReturns struct {
	A error
}

// RegisterCollectionAndTopic forwards a collection/topic registration over RPC.
func (g *apiRPCClient) RegisterCollectionAndTopic(collectionType, topicType string) error {
	_args := &Z_RegisterCollectionAndTopicArgs{collectionType, topicType}
	_returns := &Z_RegisterCollectionAndTopicReturns{}
	if err := g.client.Call("Plugin.RegisterCollectionAndTopic", _args, _returns); err != nil {
		log.Printf("RPC call to RegisterCollectionAndTopic API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) RegisterCollectionAndTopic(args *Z_RegisterCollectionAndTopicArgs, returns *Z_RegisterCollectionAndTopicReturns) error {
	if hook, ok := s.impl.(interface {
		RegisterCollectionAndTopic(collectionType, topicType string) error
	}); ok {
		returns.A = hook.RegisterCollectionAndTopic(args.A, args.B)
		returns.A = encodableError(returns.A)
	} else {
		return encodableError(fmt.Errorf("API RegisterCollectionAndTopic called but not implemented."))
	}
	return nil
}

type Z_CreateUploadSessionArgs struct {
	A *model.UploadSession
}

type Z_CreateUploadSessionReturns struct {
	A *model.UploadSession
	B error
}

// CreateUploadSession forwards creation of a file-upload session over RPC.
func (g *apiRPCClient) CreateUploadSession(us *model.UploadSession) (*model.UploadSession, error) {
	_args := &Z_CreateUploadSessionArgs{us}
	_returns := &Z_CreateUploadSessionReturns{}
	if err := g.client.Call("Plugin.CreateUploadSession", _args, _returns); err != nil {
		log.Printf("RPC call to CreateUploadSession API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) CreateUploadSession(args *Z_CreateUploadSessionArgs, returns *Z_CreateUploadSessionReturns) error {
	if hook, ok := s.impl.(interface {
		CreateUploadSession(us *model.UploadSession) (*model.UploadSession, error)
	}); ok {
		returns.A, returns.B = hook.CreateUploadSession(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API CreateUploadSession called but not implemented."))
	}
	return nil
}

type Z_GetUploadSessionArgs struct {
	A string
}

type Z_GetUploadSessionReturns struct {
	A *model.UploadSession
	B error
}

// GetUploadSession forwards an upload-session lookup over RPC.
func (g *apiRPCClient) GetUploadSession(uploadID string) (*model.UploadSession, error) {
	_args := &Z_GetUploadSessionArgs{uploadID}
	_returns := &Z_GetUploadSessionReturns{}
	if err := g.client.Call("Plugin.GetUploadSession", _args, _returns); err != nil {
		log.Printf("RPC call to GetUploadSession API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) GetUploadSession(args *Z_GetUploadSessionArgs, returns *Z_GetUploadSessionReturns) error {
	if hook, ok := s.impl.(interface {
		GetUploadSession(uploadID string) (*model.UploadSession, error)
	}); ok {
		returns.A, returns.B = hook.GetUploadSession(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API GetUploadSession called but not implemented."))
	}
	return nil
}

type Z_SendPushNotificationArgs struct {
	A *model.PushNotification
	B string
}

type Z_SendPushNotificationReturns struct {
	A *model.AppError
}

// SendPushNotification forwards a push notification over RPC (A=notification, B=userID).
func (g *apiRPCClient) SendPushNotification(notification *model.PushNotification, userID string) *model.AppError {
	_args
:= &Z_SendPushNotificationArgs{notification, userID}
	_returns := &Z_SendPushNotificationReturns{}
	if err := g.client.Call("Plugin.SendPushNotification", _args, _returns); err != nil {
		log.Printf("RPC call to SendPushNotification API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) SendPushNotification(args *Z_SendPushNotificationArgs, returns *Z_SendPushNotificationReturns) error {
	if hook, ok := s.impl.(interface {
		SendPushNotification(notification *model.PushNotification, userID string) *model.AppError
	}); ok {
		returns.A = hook.SendPushNotification(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API SendPushNotification called but not implemented."))
	}
	return nil
}

type Z_UpdateUserAuthArgs struct {
	A string
	B *model.UserAuth
}

type Z_UpdateUserAuthReturns struct {
	A *model.UserAuth
	B *model.AppError
}

// UpdateUserAuth forwards an auth-data update for a user over RPC.
func (g *apiRPCClient) UpdateUserAuth(userID string, userAuth *model.UserAuth) (*model.UserAuth, *model.AppError) {
	_args := &Z_UpdateUserAuthArgs{userID, userAuth}
	_returns := &Z_UpdateUserAuthReturns{}
	if err := g.client.Call("Plugin.UpdateUserAuth", _args, _returns); err != nil {
		log.Printf("RPC call to UpdateUserAuth API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) UpdateUserAuth(args *Z_UpdateUserAuthArgs, returns *Z_UpdateUserAuthReturns) error {
	if hook, ok := s.impl.(interface {
		UpdateUserAuth(userID string, userAuth *model.UserAuth) (*model.UserAuth, *model.AppError)
	}); ok {
		returns.A, returns.B = hook.UpdateUserAuth(args.A, args.B)
	} else {
		return encodableError(fmt.Errorf("API UpdateUserAuth called but not implemented."))
	}
	return nil
}

type Z_RegisterPluginForSharedChannelsArgs struct {
	A model.RegisterPluginOpts
}

type Z_RegisterPluginForSharedChannelsReturns struct {
	A string
	B error
}

// RegisterPluginForSharedChannels forwards shared-channels registration over
// RPC; returns the remote cluster ID assigned to the plugin.
func (g *apiRPCClient) RegisterPluginForSharedChannels(opts model.RegisterPluginOpts) (remoteID string, err error) {
	_args := &Z_RegisterPluginForSharedChannelsArgs{opts}
	_returns := &Z_RegisterPluginForSharedChannelsReturns{}
	if err := g.client.Call("Plugin.RegisterPluginForSharedChannels", _args, _returns); err != nil {
		log.Printf("RPC call to RegisterPluginForSharedChannels API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) RegisterPluginForSharedChannels(args *Z_RegisterPluginForSharedChannelsArgs, returns *Z_RegisterPluginForSharedChannelsReturns) error {
	if hook, ok := s.impl.(interface {
		RegisterPluginForSharedChannels(opts model.RegisterPluginOpts) (remoteID string, err error)
	}); ok {
		returns.A, returns.B = hook.RegisterPluginForSharedChannels(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API RegisterPluginForSharedChannels called but not implemented."))
	}
	return nil
}

type Z_UnregisterPluginForSharedChannelsArgs struct {
	A string
}

type Z_UnregisterPluginForSharedChannelsReturns struct {
	A error
}

// UnregisterPluginForSharedChannels forwards shared-channels unregistration over RPC.
func (g *apiRPCClient) UnregisterPluginForSharedChannels(pluginID string) error {
	_args := &Z_UnregisterPluginForSharedChannelsArgs{pluginID}
	_returns := &Z_UnregisterPluginForSharedChannelsReturns{}
	if err := g.client.Call("Plugin.UnregisterPluginForSharedChannels", _args, _returns); err != nil {
		log.Printf("RPC call to UnregisterPluginForSharedChannels API failed: %s", err.Error())
	}
	return _returns.A
}

func (s *apiRPCServer) UnregisterPluginForSharedChannels(args *Z_UnregisterPluginForSharedChannelsArgs, returns *Z_UnregisterPluginForSharedChannelsReturns) error {
	if hook, ok := s.impl.(interface {
		UnregisterPluginForSharedChannels(pluginID string) error
	}); ok {
		returns.A = hook.UnregisterPluginForSharedChannels(args.A)
		returns.A = encodableError(returns.A)
	} else {
		return encodableError(fmt.Errorf("API UnregisterPluginForSharedChannels called but not implemented."))
	}
	return nil
}

type Z_ShareChannelArgs struct {
	A *model.SharedChannel
}

type Z_ShareChannelReturns struct {
	A *model.SharedChannel
	B error
}

// ShareChannel forwards a share-channel request over RPC.
func (g *apiRPCClient) ShareChannel(sc *model.SharedChannel) (*model.SharedChannel, error) {
	_args := &Z_ShareChannelArgs{sc}
	_returns := &Z_ShareChannelReturns{}
	if err := g.client.Call("Plugin.ShareChannel", _args, _returns); err != nil {
		log.Printf("RPC call to ShareChannel API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) ShareChannel(args *Z_ShareChannelArgs, returns *Z_ShareChannelReturns) error {
	if hook, ok := s.impl.(interface {
		ShareChannel(sc *model.SharedChannel) (*model.SharedChannel, error)
	}); ok {
		returns.A, returns.B = hook.ShareChannel(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API ShareChannel called but not implemented."))
	}
	return nil
}

type Z_UpdateSharedChannelArgs struct {
	A *model.SharedChannel
}

type Z_UpdateSharedChannelReturns struct {
	A *model.SharedChannel
	B error
}

// UpdateSharedChannel forwards a shared-channel update over RPC.
func (g *apiRPCClient) UpdateSharedChannel(sc *model.SharedChannel) (*model.SharedChannel, error) {
	_args := &Z_UpdateSharedChannelArgs{sc}
	_returns := &Z_UpdateSharedChannelReturns{}
	if err := g.client.Call("Plugin.UpdateSharedChannel", _args, _returns); err != nil {
		log.Printf("RPC call to UpdateSharedChannel API failed: %s", err.Error())
	}
	return _returns.A, _returns.B
}

func (s *apiRPCServer) UpdateSharedChannel(args *Z_UpdateSharedChannelArgs, returns *Z_UpdateSharedChannelReturns) error {
	if hook, ok := s.impl.(interface {
		UpdateSharedChannel(sc *model.SharedChannel) (*model.SharedChannel, error)
	}); ok {
		returns.A, returns.B = hook.UpdateSharedChannel(args.A)
		returns.B = encodableError(returns.B)
	} else {
		return encodableError(fmt.Errorf("API UpdateSharedChannel called but not implemented."))
	}
	return nil
}

type Z_UnshareChannelArgs struct {
	A string
}

type
Z_UnshareChannelReturns struct { + A bool + B error +} + +func (g *apiRPCClient) UnshareChannel(channelID string) (unshared bool, err error) { + _args := &Z_UnshareChannelArgs{channelID} + _returns := &Z_UnshareChannelReturns{} + if err := g.client.Call("Plugin.UnshareChannel", _args, _returns); err != nil { + log.Printf("RPC call to UnshareChannel API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UnshareChannel(args *Z_UnshareChannelArgs, returns *Z_UnshareChannelReturns) error { + if hook, ok := s.impl.(interface { + UnshareChannel(channelID string) (unshared bool, err error) + }); ok { + returns.A, returns.B = hook.UnshareChannel(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API UnshareChannel called but not implemented.")) + } + return nil +} + +type Z_UpdateSharedChannelCursorArgs struct { + A string + B string + C model.GetPostsSinceForSyncCursor +} + +type Z_UpdateSharedChannelCursorReturns struct { + A error +} + +func (g *apiRPCClient) UpdateSharedChannelCursor(channelID, remoteID string, cursor model.GetPostsSinceForSyncCursor) error { + _args := &Z_UpdateSharedChannelCursorArgs{channelID, remoteID, cursor} + _returns := &Z_UpdateSharedChannelCursorReturns{} + if err := g.client.Call("Plugin.UpdateSharedChannelCursor", _args, _returns); err != nil { + log.Printf("RPC call to UpdateSharedChannelCursor API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) UpdateSharedChannelCursor(args *Z_UpdateSharedChannelCursorArgs, returns *Z_UpdateSharedChannelCursorReturns) error { + if hook, ok := s.impl.(interface { + UpdateSharedChannelCursor(channelID, remoteID string, cursor model.GetPostsSinceForSyncCursor) error + }); ok { + returns.A = hook.UpdateSharedChannelCursor(args.A, args.B, args.C) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API UpdateSharedChannelCursor called but not implemented.")) + } 
+ return nil +} + +type Z_SyncSharedChannelArgs struct { + A string +} + +type Z_SyncSharedChannelReturns struct { + A error +} + +func (g *apiRPCClient) SyncSharedChannel(channelID string) error { + _args := &Z_SyncSharedChannelArgs{channelID} + _returns := &Z_SyncSharedChannelReturns{} + if err := g.client.Call("Plugin.SyncSharedChannel", _args, _returns); err != nil { + log.Printf("RPC call to SyncSharedChannel API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) SyncSharedChannel(args *Z_SyncSharedChannelArgs, returns *Z_SyncSharedChannelReturns) error { + if hook, ok := s.impl.(interface { + SyncSharedChannel(channelID string) error + }); ok { + returns.A = hook.SyncSharedChannel(args.A) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API SyncSharedChannel called but not implemented.")) + } + return nil +} + +type Z_InviteRemoteToChannelArgs struct { + A string + B string + C string + D bool +} + +type Z_InviteRemoteToChannelReturns struct { + A error +} + +func (g *apiRPCClient) InviteRemoteToChannel(channelID string, remoteID string, userID string, shareIfNotShared bool) error { + _args := &Z_InviteRemoteToChannelArgs{channelID, remoteID, userID, shareIfNotShared} + _returns := &Z_InviteRemoteToChannelReturns{} + if err := g.client.Call("Plugin.InviteRemoteToChannel", _args, _returns); err != nil { + log.Printf("RPC call to InviteRemoteToChannel API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) InviteRemoteToChannel(args *Z_InviteRemoteToChannelArgs, returns *Z_InviteRemoteToChannelReturns) error { + if hook, ok := s.impl.(interface { + InviteRemoteToChannel(channelID string, remoteID string, userID string, shareIfNotShared bool) error + }); ok { + returns.A = hook.InviteRemoteToChannel(args.A, args.B, args.C, args.D) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API InviteRemoteToChannel called but not 
implemented.")) + } + return nil +} + +type Z_UninviteRemoteFromChannelArgs struct { + A string + B string +} + +type Z_UninviteRemoteFromChannelReturns struct { + A error +} + +func (g *apiRPCClient) UninviteRemoteFromChannel(channelID string, remoteID string) error { + _args := &Z_UninviteRemoteFromChannelArgs{channelID, remoteID} + _returns := &Z_UninviteRemoteFromChannelReturns{} + if err := g.client.Call("Plugin.UninviteRemoteFromChannel", _args, _returns); err != nil { + log.Printf("RPC call to UninviteRemoteFromChannel API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) UninviteRemoteFromChannel(args *Z_UninviteRemoteFromChannelArgs, returns *Z_UninviteRemoteFromChannelReturns) error { + if hook, ok := s.impl.(interface { + UninviteRemoteFromChannel(channelID string, remoteID string) error + }); ok { + returns.A = hook.UninviteRemoteFromChannel(args.A, args.B) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API UninviteRemoteFromChannel called but not implemented.")) + } + return nil +} + +type Z_UpsertGroupMemberArgs struct { + A string + B string +} + +type Z_UpsertGroupMemberReturns struct { + A *model.GroupMember + B *model.AppError +} + +func (g *apiRPCClient) UpsertGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) { + _args := &Z_UpsertGroupMemberArgs{groupID, userID} + _returns := &Z_UpsertGroupMemberReturns{} + if err := g.client.Call("Plugin.UpsertGroupMember", _args, _returns); err != nil { + log.Printf("RPC call to UpsertGroupMember API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpsertGroupMember(args *Z_UpsertGroupMemberArgs, returns *Z_UpsertGroupMemberReturns) error { + if hook, ok := s.impl.(interface { + UpsertGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpsertGroupMember(args.A, args.B) + } else { + return 
encodableError(fmt.Errorf("API UpsertGroupMember called but not implemented.")) + } + return nil +} + +type Z_UpsertGroupMembersArgs struct { + A string + B []string +} + +type Z_UpsertGroupMembersReturns struct { + A []*model.GroupMember + B *model.AppError +} + +func (g *apiRPCClient) UpsertGroupMembers(groupID string, userIDs []string) ([]*model.GroupMember, *model.AppError) { + _args := &Z_UpsertGroupMembersArgs{groupID, userIDs} + _returns := &Z_UpsertGroupMembersReturns{} + if err := g.client.Call("Plugin.UpsertGroupMembers", _args, _returns); err != nil { + log.Printf("RPC call to UpsertGroupMembers API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpsertGroupMembers(args *Z_UpsertGroupMembersArgs, returns *Z_UpsertGroupMembersReturns) error { + if hook, ok := s.impl.(interface { + UpsertGroupMembers(groupID string, userIDs []string) ([]*model.GroupMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpsertGroupMembers(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API UpsertGroupMembers called but not implemented.")) + } + return nil +} + +type Z_GetGroupByRemoteIDArgs struct { + A string + B model.GroupSource +} + +type Z_GetGroupByRemoteIDReturns struct { + A *model.Group + B *model.AppError +} + +func (g *apiRPCClient) GetGroupByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, *model.AppError) { + _args := &Z_GetGroupByRemoteIDArgs{remoteID, groupSource} + _returns := &Z_GetGroupByRemoteIDReturns{} + if err := g.client.Call("Plugin.GetGroupByRemoteID", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupByRemoteID API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupByRemoteID(args *Z_GetGroupByRemoteIDArgs, returns *Z_GetGroupByRemoteIDReturns) error { + if hook, ok := s.impl.(interface { + GetGroupByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, *model.AppError) + }); ok 
{ + returns.A, returns.B = hook.GetGroupByRemoteID(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetGroupByRemoteID called but not implemented.")) + } + return nil +} + +type Z_CreateGroupArgs struct { + A *model.Group +} + +type Z_CreateGroupReturns struct { + A *model.Group + B *model.AppError +} + +func (g *apiRPCClient) CreateGroup(group *model.Group) (*model.Group, *model.AppError) { + _args := &Z_CreateGroupArgs{group} + _returns := &Z_CreateGroupReturns{} + if err := g.client.Call("Plugin.CreateGroup", _args, _returns); err != nil { + log.Printf("RPC call to CreateGroup API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreateGroup(args *Z_CreateGroupArgs, returns *Z_CreateGroupReturns) error { + if hook, ok := s.impl.(interface { + CreateGroup(group *model.Group) (*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.CreateGroup(args.A) + } else { + return encodableError(fmt.Errorf("API CreateGroup called but not implemented.")) + } + return nil +} + +type Z_UpdateGroupArgs struct { + A *model.Group +} + +type Z_UpdateGroupReturns struct { + A *model.Group + B *model.AppError +} + +func (g *apiRPCClient) UpdateGroup(group *model.Group) (*model.Group, *model.AppError) { + _args := &Z_UpdateGroupArgs{group} + _returns := &Z_UpdateGroupReturns{} + if err := g.client.Call("Plugin.UpdateGroup", _args, _returns); err != nil { + log.Printf("RPC call to UpdateGroup API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateGroup(args *Z_UpdateGroupArgs, returns *Z_UpdateGroupReturns) error { + if hook, ok := s.impl.(interface { + UpdateGroup(group *model.Group) (*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateGroup(args.A) + } else { + return encodableError(fmt.Errorf("API UpdateGroup called but not implemented.")) + } + return nil +} + +type Z_DeleteGroupArgs struct { + A string +} + +type Z_DeleteGroupReturns 
struct { + A *model.Group + B *model.AppError +} + +func (g *apiRPCClient) DeleteGroup(groupID string) (*model.Group, *model.AppError) { + _args := &Z_DeleteGroupArgs{groupID} + _returns := &Z_DeleteGroupReturns{} + if err := g.client.Call("Plugin.DeleteGroup", _args, _returns); err != nil { + log.Printf("RPC call to DeleteGroup API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) DeleteGroup(args *Z_DeleteGroupArgs, returns *Z_DeleteGroupReturns) error { + if hook, ok := s.impl.(interface { + DeleteGroup(groupID string) (*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.DeleteGroup(args.A) + } else { + return encodableError(fmt.Errorf("API DeleteGroup called but not implemented.")) + } + return nil +} + +type Z_RestoreGroupArgs struct { + A string +} + +type Z_RestoreGroupReturns struct { + A *model.Group + B *model.AppError +} + +func (g *apiRPCClient) RestoreGroup(groupID string) (*model.Group, *model.AppError) { + _args := &Z_RestoreGroupArgs{groupID} + _returns := &Z_RestoreGroupReturns{} + if err := g.client.Call("Plugin.RestoreGroup", _args, _returns); err != nil { + log.Printf("RPC call to RestoreGroup API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) RestoreGroup(args *Z_RestoreGroupArgs, returns *Z_RestoreGroupReturns) error { + if hook, ok := s.impl.(interface { + RestoreGroup(groupID string) (*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.RestoreGroup(args.A) + } else { + return encodableError(fmt.Errorf("API RestoreGroup called but not implemented.")) + } + return nil +} + +type Z_DeleteGroupMemberArgs struct { + A string + B string +} + +type Z_DeleteGroupMemberReturns struct { + A *model.GroupMember + B *model.AppError +} + +func (g *apiRPCClient) DeleteGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) { + _args := &Z_DeleteGroupMemberArgs{groupID, userID} + _returns := 
&Z_DeleteGroupMemberReturns{} + if err := g.client.Call("Plugin.DeleteGroupMember", _args, _returns); err != nil { + log.Printf("RPC call to DeleteGroupMember API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) DeleteGroupMember(args *Z_DeleteGroupMemberArgs, returns *Z_DeleteGroupMemberReturns) error { + if hook, ok := s.impl.(interface { + DeleteGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) + }); ok { + returns.A, returns.B = hook.DeleteGroupMember(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API DeleteGroupMember called but not implemented.")) + } + return nil +} + +type Z_GetGroupSyncableArgs struct { + A string + B string + C model.GroupSyncableType +} + +type Z_GetGroupSyncableReturns struct { + A *model.GroupSyncable + B *model.AppError +} + +func (g *apiRPCClient) GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + _args := &Z_GetGroupSyncableArgs{groupID, syncableID, syncableType} + _returns := &Z_GetGroupSyncableReturns{} + if err := g.client.Call("Plugin.GetGroupSyncable", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupSyncable API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupSyncable(args *Z_GetGroupSyncableArgs, returns *Z_GetGroupSyncableReturns) error { + if hook, ok := s.impl.(interface { + GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroupSyncable(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API GetGroupSyncable called but not implemented.")) + } + return nil +} + +type Z_GetGroupSyncablesArgs struct { + A string + B model.GroupSyncableType +} + +type Z_GetGroupSyncablesReturns struct { + A []*model.GroupSyncable + B *model.AppError +} + +func (g 
*apiRPCClient) GetGroupSyncables(groupID string, syncableType model.GroupSyncableType) ([]*model.GroupSyncable, *model.AppError) { + _args := &Z_GetGroupSyncablesArgs{groupID, syncableType} + _returns := &Z_GetGroupSyncablesReturns{} + if err := g.client.Call("Plugin.GetGroupSyncables", _args, _returns); err != nil { + log.Printf("RPC call to GetGroupSyncables API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroupSyncables(args *Z_GetGroupSyncablesArgs, returns *Z_GetGroupSyncablesReturns) error { + if hook, ok := s.impl.(interface { + GetGroupSyncables(groupID string, syncableType model.GroupSyncableType) ([]*model.GroupSyncable, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroupSyncables(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API GetGroupSyncables called but not implemented.")) + } + return nil +} + +type Z_UpsertGroupSyncableArgs struct { + A *model.GroupSyncable +} + +type Z_UpsertGroupSyncableReturns struct { + A *model.GroupSyncable + B *model.AppError +} + +func (g *apiRPCClient) UpsertGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + _args := &Z_UpsertGroupSyncableArgs{groupSyncable} + _returns := &Z_UpsertGroupSyncableReturns{} + if err := g.client.Call("Plugin.UpsertGroupSyncable", _args, _returns); err != nil { + log.Printf("RPC call to UpsertGroupSyncable API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpsertGroupSyncable(args *Z_UpsertGroupSyncableArgs, returns *Z_UpsertGroupSyncableReturns) error { + if hook, ok := s.impl.(interface { + UpsertGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpsertGroupSyncable(args.A) + } else { + return encodableError(fmt.Errorf("API UpsertGroupSyncable called but not implemented.")) + } + return nil +} + +type Z_UpdateGroupSyncableArgs struct { + A 
*model.GroupSyncable +} + +type Z_UpdateGroupSyncableReturns struct { + A *model.GroupSyncable + B *model.AppError +} + +func (g *apiRPCClient) UpdateGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + _args := &Z_UpdateGroupSyncableArgs{groupSyncable} + _returns := &Z_UpdateGroupSyncableReturns{} + if err := g.client.Call("Plugin.UpdateGroupSyncable", _args, _returns); err != nil { + log.Printf("RPC call to UpdateGroupSyncable API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateGroupSyncable(args *Z_UpdateGroupSyncableArgs, returns *Z_UpdateGroupSyncableReturns) error { + if hook, ok := s.impl.(interface { + UpdateGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateGroupSyncable(args.A) + } else { + return encodableError(fmt.Errorf("API UpdateGroupSyncable called but not implemented.")) + } + return nil +} + +type Z_DeleteGroupSyncableArgs struct { + A string + B string + C model.GroupSyncableType +} + +type Z_DeleteGroupSyncableReturns struct { + A *model.GroupSyncable + B *model.AppError +} + +func (g *apiRPCClient) DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + _args := &Z_DeleteGroupSyncableArgs{groupID, syncableID, syncableType} + _returns := &Z_DeleteGroupSyncableReturns{} + if err := g.client.Call("Plugin.DeleteGroupSyncable", _args, _returns); err != nil { + log.Printf("RPC call to DeleteGroupSyncable API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) DeleteGroupSyncable(args *Z_DeleteGroupSyncableArgs, returns *Z_DeleteGroupSyncableReturns) error { + if hook, ok := s.impl.(interface { + DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) + }); ok { + returns.A, returns.B 
= hook.DeleteGroupSyncable(args.A, args.B, args.C) + } else { + return encodableError(fmt.Errorf("API DeleteGroupSyncable called but not implemented.")) + } + return nil +} + +type Z_UpdateUserRolesArgs struct { + A string + B string +} + +type Z_UpdateUserRolesReturns struct { + A *model.User + B *model.AppError +} + +func (g *apiRPCClient) UpdateUserRoles(userID, newRoles string) (*model.User, *model.AppError) { + _args := &Z_UpdateUserRolesArgs{userID, newRoles} + _returns := &Z_UpdateUserRolesReturns{} + if err := g.client.Call("Plugin.UpdateUserRoles", _args, _returns); err != nil { + log.Printf("RPC call to UpdateUserRoles API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdateUserRoles(args *Z_UpdateUserRolesArgs, returns *Z_UpdateUserRolesReturns) error { + if hook, ok := s.impl.(interface { + UpdateUserRoles(userID, newRoles string) (*model.User, *model.AppError) + }); ok { + returns.A, returns.B = hook.UpdateUserRoles(args.A, args.B) + } else { + return encodableError(fmt.Errorf("API UpdateUserRoles called but not implemented.")) + } + return nil +} + +type Z_GetPluginIDArgs struct { +} + +type Z_GetPluginIDReturns struct { + A string +} + +func (g *apiRPCClient) GetPluginID() string { + _args := &Z_GetPluginIDArgs{} + _returns := &Z_GetPluginIDReturns{} + if err := g.client.Call("Plugin.GetPluginID", _args, _returns); err != nil { + log.Printf("RPC call to GetPluginID API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) GetPluginID(args *Z_GetPluginIDArgs, returns *Z_GetPluginIDReturns) error { + if hook, ok := s.impl.(interface { + GetPluginID() string + }); ok { + returns.A = hook.GetPluginID() + } else { + return encodableError(fmt.Errorf("API GetPluginID called but not implemented.")) + } + return nil +} + +type Z_GetGroupsArgs struct { + A int + B int + C model.GroupSearchOpts + D *model.ViewUsersRestrictions +} + +type Z_GetGroupsReturns struct { + A []*model.Group + B 
*model.AppError +} + +func (g *apiRPCClient) GetGroups(page, perPage int, opts model.GroupSearchOpts, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, *model.AppError) { + _args := &Z_GetGroupsArgs{page, perPage, opts, viewRestrictions} + _returns := &Z_GetGroupsReturns{} + if err := g.client.Call("Plugin.GetGroups", _args, _returns); err != nil { + log.Printf("RPC call to GetGroups API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetGroups(args *Z_GetGroupsArgs, returns *Z_GetGroupsReturns) error { + if hook, ok := s.impl.(interface { + GetGroups(page, perPage int, opts model.GroupSearchOpts, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, *model.AppError) + }); ok { + returns.A, returns.B = hook.GetGroups(args.A, args.B, args.C, args.D) + } else { + return encodableError(fmt.Errorf("API GetGroups called but not implemented.")) + } + return nil +} + +type Z_CreateDefaultSyncableMembershipsArgs struct { + A model.CreateDefaultMembershipParams +} + +type Z_CreateDefaultSyncableMembershipsReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) CreateDefaultSyncableMemberships(params model.CreateDefaultMembershipParams) *model.AppError { + _args := &Z_CreateDefaultSyncableMembershipsArgs{params} + _returns := &Z_CreateDefaultSyncableMembershipsReturns{} + if err := g.client.Call("Plugin.CreateDefaultSyncableMemberships", _args, _returns); err != nil { + log.Printf("RPC call to CreateDefaultSyncableMemberships API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) CreateDefaultSyncableMemberships(args *Z_CreateDefaultSyncableMembershipsArgs, returns *Z_CreateDefaultSyncableMembershipsReturns) error { + if hook, ok := s.impl.(interface { + CreateDefaultSyncableMemberships(params model.CreateDefaultMembershipParams) *model.AppError + }); ok { + returns.A = hook.CreateDefaultSyncableMemberships(args.A) + } else { + return encodableError(fmt.Errorf("API 
CreateDefaultSyncableMemberships called but not implemented.")) + } + return nil +} + +type Z_DeleteGroupConstrainedMembershipsArgs struct { +} + +type Z_DeleteGroupConstrainedMembershipsReturns struct { + A *model.AppError +} + +func (g *apiRPCClient) DeleteGroupConstrainedMemberships() *model.AppError { + _args := &Z_DeleteGroupConstrainedMembershipsArgs{} + _returns := &Z_DeleteGroupConstrainedMembershipsReturns{} + if err := g.client.Call("Plugin.DeleteGroupConstrainedMemberships", _args, _returns); err != nil { + log.Printf("RPC call to DeleteGroupConstrainedMemberships API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeleteGroupConstrainedMemberships(args *Z_DeleteGroupConstrainedMembershipsArgs, returns *Z_DeleteGroupConstrainedMembershipsReturns) error { + if hook, ok := s.impl.(interface { + DeleteGroupConstrainedMemberships() *model.AppError + }); ok { + returns.A = hook.DeleteGroupConstrainedMemberships() + } else { + return encodableError(fmt.Errorf("API DeleteGroupConstrainedMemberships called but not implemented.")) + } + return nil +} + +type Z_CreatePropertyFieldArgs struct { + A *model.PropertyField +} + +type Z_CreatePropertyFieldReturns struct { + A *model.PropertyField + B error +} + +func (g *apiRPCClient) CreatePropertyField(field *model.PropertyField) (*model.PropertyField, error) { + _args := &Z_CreatePropertyFieldArgs{field} + _returns := &Z_CreatePropertyFieldReturns{} + if err := g.client.Call("Plugin.CreatePropertyField", _args, _returns); err != nil { + log.Printf("RPC call to CreatePropertyField API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreatePropertyField(args *Z_CreatePropertyFieldArgs, returns *Z_CreatePropertyFieldReturns) error { + if hook, ok := s.impl.(interface { + CreatePropertyField(field *model.PropertyField) (*model.PropertyField, error) + }); ok { + returns.A, returns.B = hook.CreatePropertyField(args.A) + returns.B = 
encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API CreatePropertyField called but not implemented.")) + } + return nil +} + +type Z_GetPropertyFieldArgs struct { + A string + B string +} + +type Z_GetPropertyFieldReturns struct { + A *model.PropertyField + B error +} + +func (g *apiRPCClient) GetPropertyField(groupID, fieldID string) (*model.PropertyField, error) { + _args := &Z_GetPropertyFieldArgs{groupID, fieldID} + _returns := &Z_GetPropertyFieldReturns{} + if err := g.client.Call("Plugin.GetPropertyField", _args, _returns); err != nil { + log.Printf("RPC call to GetPropertyField API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPropertyField(args *Z_GetPropertyFieldArgs, returns *Z_GetPropertyFieldReturns) error { + if hook, ok := s.impl.(interface { + GetPropertyField(groupID, fieldID string) (*model.PropertyField, error) + }); ok { + returns.A, returns.B = hook.GetPropertyField(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API GetPropertyField called but not implemented.")) + } + return nil +} + +type Z_GetPropertyFieldsArgs struct { + A string + B []string +} + +type Z_GetPropertyFieldsReturns struct { + A []*model.PropertyField + B error +} + +func (g *apiRPCClient) GetPropertyFields(groupID string, ids []string) ([]*model.PropertyField, error) { + _args := &Z_GetPropertyFieldsArgs{groupID, ids} + _returns := &Z_GetPropertyFieldsReturns{} + if err := g.client.Call("Plugin.GetPropertyFields", _args, _returns); err != nil { + log.Printf("RPC call to GetPropertyFields API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPropertyFields(args *Z_GetPropertyFieldsArgs, returns *Z_GetPropertyFieldsReturns) error { + if hook, ok := s.impl.(interface { + GetPropertyFields(groupID string, ids []string) ([]*model.PropertyField, error) + }); ok { + returns.A, returns.B = 
hook.GetPropertyFields(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API GetPropertyFields called but not implemented.")) + } + return nil +} + +type Z_UpdatePropertyFieldArgs struct { + A string + B *model.PropertyField +} + +type Z_UpdatePropertyFieldReturns struct { + A *model.PropertyField + B error +} + +func (g *apiRPCClient) UpdatePropertyField(groupID string, field *model.PropertyField) (*model.PropertyField, error) { + _args := &Z_UpdatePropertyFieldArgs{groupID, field} + _returns := &Z_UpdatePropertyFieldReturns{} + if err := g.client.Call("Plugin.UpdatePropertyField", _args, _returns); err != nil { + log.Printf("RPC call to UpdatePropertyField API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdatePropertyField(args *Z_UpdatePropertyFieldArgs, returns *Z_UpdatePropertyFieldReturns) error { + if hook, ok := s.impl.(interface { + UpdatePropertyField(groupID string, field *model.PropertyField) (*model.PropertyField, error) + }); ok { + returns.A, returns.B = hook.UpdatePropertyField(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API UpdatePropertyField called but not implemented.")) + } + return nil +} + +type Z_DeletePropertyFieldArgs struct { + A string + B string +} + +type Z_DeletePropertyFieldReturns struct { + A error +} + +func (g *apiRPCClient) DeletePropertyField(groupID, fieldID string) error { + _args := &Z_DeletePropertyFieldArgs{groupID, fieldID} + _returns := &Z_DeletePropertyFieldReturns{} + if err := g.client.Call("Plugin.DeletePropertyField", _args, _returns); err != nil { + log.Printf("RPC call to DeletePropertyField API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeletePropertyField(args *Z_DeletePropertyFieldArgs, returns *Z_DeletePropertyFieldReturns) error { + if hook, ok := s.impl.(interface { + DeletePropertyField(groupID, fieldID string) 
error + }); ok { + returns.A = hook.DeletePropertyField(args.A, args.B) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API DeletePropertyField called but not implemented.")) + } + return nil +} + +type Z_SearchPropertyFieldsArgs struct { + A string + B model.PropertyFieldSearchOpts +} + +type Z_SearchPropertyFieldsReturns struct { + A []*model.PropertyField + B error +} + +func (g *apiRPCClient) SearchPropertyFields(groupID string, opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) { + _args := &Z_SearchPropertyFieldsArgs{groupID, opts} + _returns := &Z_SearchPropertyFieldsReturns{} + if err := g.client.Call("Plugin.SearchPropertyFields", _args, _returns); err != nil { + log.Printf("RPC call to SearchPropertyFields API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SearchPropertyFields(args *Z_SearchPropertyFieldsArgs, returns *Z_SearchPropertyFieldsReturns) error { + if hook, ok := s.impl.(interface { + SearchPropertyFields(groupID string, opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) + }); ok { + returns.A, returns.B = hook.SearchPropertyFields(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API SearchPropertyFields called but not implemented.")) + } + return nil +} + +type Z_CountPropertyFieldsArgs struct { + A string + B bool +} + +type Z_CountPropertyFieldsReturns struct { + A int64 + B error +} + +func (g *apiRPCClient) CountPropertyFields(groupID string, includeDeleted bool) (int64, error) { + _args := &Z_CountPropertyFieldsArgs{groupID, includeDeleted} + _returns := &Z_CountPropertyFieldsReturns{} + if err := g.client.Call("Plugin.CountPropertyFields", _args, _returns); err != nil { + log.Printf("RPC call to CountPropertyFields API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CountPropertyFields(args *Z_CountPropertyFieldsArgs, returns 
*Z_CountPropertyFieldsReturns) error { + if hook, ok := s.impl.(interface { + CountPropertyFields(groupID string, includeDeleted bool) (int64, error) + }); ok { + returns.A, returns.B = hook.CountPropertyFields(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API CountPropertyFields called but not implemented.")) + } + return nil +} + +type Z_CountPropertyFieldsForTargetArgs struct { + A string + B string + C string + D bool +} + +type Z_CountPropertyFieldsForTargetReturns struct { + A int64 + B error +} + +func (g *apiRPCClient) CountPropertyFieldsForTarget(groupID, targetType, targetID string, includeDeleted bool) (int64, error) { + _args := &Z_CountPropertyFieldsForTargetArgs{groupID, targetType, targetID, includeDeleted} + _returns := &Z_CountPropertyFieldsForTargetReturns{} + if err := g.client.Call("Plugin.CountPropertyFieldsForTarget", _args, _returns); err != nil { + log.Printf("RPC call to CountPropertyFieldsForTarget API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CountPropertyFieldsForTarget(args *Z_CountPropertyFieldsForTargetArgs, returns *Z_CountPropertyFieldsForTargetReturns) error { + if hook, ok := s.impl.(interface { + CountPropertyFieldsForTarget(groupID, targetType, targetID string, includeDeleted bool) (int64, error) + }); ok { + returns.A, returns.B = hook.CountPropertyFieldsForTarget(args.A, args.B, args.C, args.D) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API CountPropertyFieldsForTarget called but not implemented.")) + } + return nil +} + +type Z_CreatePropertyValueArgs struct { + A *model.PropertyValue +} + +type Z_CreatePropertyValueReturns struct { + A *model.PropertyValue + B error +} + +func (g *apiRPCClient) CreatePropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) { + _args := &Z_CreatePropertyValueArgs{value} + _returns := &Z_CreatePropertyValueReturns{} + if err := 
g.client.Call("Plugin.CreatePropertyValue", _args, _returns); err != nil { + log.Printf("RPC call to CreatePropertyValue API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) CreatePropertyValue(args *Z_CreatePropertyValueArgs, returns *Z_CreatePropertyValueReturns) error { + if hook, ok := s.impl.(interface { + CreatePropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) + }); ok { + returns.A, returns.B = hook.CreatePropertyValue(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API CreatePropertyValue called but not implemented.")) + } + return nil +} + +type Z_GetPropertyValueArgs struct { + A string + B string +} + +type Z_GetPropertyValueReturns struct { + A *model.PropertyValue + B error +} + +func (g *apiRPCClient) GetPropertyValue(groupID, valueID string) (*model.PropertyValue, error) { + _args := &Z_GetPropertyValueArgs{groupID, valueID} + _returns := &Z_GetPropertyValueReturns{} + if err := g.client.Call("Plugin.GetPropertyValue", _args, _returns); err != nil { + log.Printf("RPC call to GetPropertyValue API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPropertyValue(args *Z_GetPropertyValueArgs, returns *Z_GetPropertyValueReturns) error { + if hook, ok := s.impl.(interface { + GetPropertyValue(groupID, valueID string) (*model.PropertyValue, error) + }); ok { + returns.A, returns.B = hook.GetPropertyValue(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API GetPropertyValue called but not implemented.")) + } + return nil +} + +type Z_GetPropertyValuesArgs struct { + A string + B []string +} + +type Z_GetPropertyValuesReturns struct { + A []*model.PropertyValue + B error +} + +func (g *apiRPCClient) GetPropertyValues(groupID string, ids []string) ([]*model.PropertyValue, error) { + _args := &Z_GetPropertyValuesArgs{groupID, ids} + _returns := 
&Z_GetPropertyValuesReturns{} + if err := g.client.Call("Plugin.GetPropertyValues", _args, _returns); err != nil { + log.Printf("RPC call to GetPropertyValues API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPropertyValues(args *Z_GetPropertyValuesArgs, returns *Z_GetPropertyValuesReturns) error { + if hook, ok := s.impl.(interface { + GetPropertyValues(groupID string, ids []string) ([]*model.PropertyValue, error) + }); ok { + returns.A, returns.B = hook.GetPropertyValues(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API GetPropertyValues called but not implemented.")) + } + return nil +} + +type Z_UpdatePropertyValueArgs struct { + A string + B *model.PropertyValue +} + +type Z_UpdatePropertyValueReturns struct { + A *model.PropertyValue + B error +} + +func (g *apiRPCClient) UpdatePropertyValue(groupID string, value *model.PropertyValue) (*model.PropertyValue, error) { + _args := &Z_UpdatePropertyValueArgs{groupID, value} + _returns := &Z_UpdatePropertyValueReturns{} + if err := g.client.Call("Plugin.UpdatePropertyValue", _args, _returns); err != nil { + log.Printf("RPC call to UpdatePropertyValue API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdatePropertyValue(args *Z_UpdatePropertyValueArgs, returns *Z_UpdatePropertyValueReturns) error { + if hook, ok := s.impl.(interface { + UpdatePropertyValue(groupID string, value *model.PropertyValue) (*model.PropertyValue, error) + }); ok { + returns.A, returns.B = hook.UpdatePropertyValue(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API UpdatePropertyValue called but not implemented.")) + } + return nil +} + +type Z_UpsertPropertyValueArgs struct { + A *model.PropertyValue +} + +type Z_UpsertPropertyValueReturns struct { + A *model.PropertyValue + B error +} + +func (g *apiRPCClient) UpsertPropertyValue(value 
*model.PropertyValue) (*model.PropertyValue, error) { + _args := &Z_UpsertPropertyValueArgs{value} + _returns := &Z_UpsertPropertyValueReturns{} + if err := g.client.Call("Plugin.UpsertPropertyValue", _args, _returns); err != nil { + log.Printf("RPC call to UpsertPropertyValue API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpsertPropertyValue(args *Z_UpsertPropertyValueArgs, returns *Z_UpsertPropertyValueReturns) error { + if hook, ok := s.impl.(interface { + UpsertPropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) + }); ok { + returns.A, returns.B = hook.UpsertPropertyValue(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API UpsertPropertyValue called but not implemented.")) + } + return nil +} + +type Z_DeletePropertyValueArgs struct { + A string + B string +} + +type Z_DeletePropertyValueReturns struct { + A error +} + +func (g *apiRPCClient) DeletePropertyValue(groupID, valueID string) error { + _args := &Z_DeletePropertyValueArgs{groupID, valueID} + _returns := &Z_DeletePropertyValueReturns{} + if err := g.client.Call("Plugin.DeletePropertyValue", _args, _returns); err != nil { + log.Printf("RPC call to DeletePropertyValue API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeletePropertyValue(args *Z_DeletePropertyValueArgs, returns *Z_DeletePropertyValueReturns) error { + if hook, ok := s.impl.(interface { + DeletePropertyValue(groupID, valueID string) error + }); ok { + returns.A = hook.DeletePropertyValue(args.A, args.B) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API DeletePropertyValue called but not implemented.")) + } + return nil +} + +type Z_SearchPropertyValuesArgs struct { + A string + B model.PropertyValueSearchOpts +} + +type Z_SearchPropertyValuesReturns struct { + A []*model.PropertyValue + B error +} + +func (g *apiRPCClient) SearchPropertyValues(groupID 
string, opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error) { + _args := &Z_SearchPropertyValuesArgs{groupID, opts} + _returns := &Z_SearchPropertyValuesReturns{} + if err := g.client.Call("Plugin.SearchPropertyValues", _args, _returns); err != nil { + log.Printf("RPC call to SearchPropertyValues API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) SearchPropertyValues(args *Z_SearchPropertyValuesArgs, returns *Z_SearchPropertyValuesReturns) error { + if hook, ok := s.impl.(interface { + SearchPropertyValues(groupID string, opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error) + }); ok { + returns.A, returns.B = hook.SearchPropertyValues(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API SearchPropertyValues called but not implemented.")) + } + return nil +} + +type Z_RegisterPropertyGroupArgs struct { + A string +} + +type Z_RegisterPropertyGroupReturns struct { + A *model.PropertyGroup + B error +} + +func (g *apiRPCClient) RegisterPropertyGroup(name string) (*model.PropertyGroup, error) { + _args := &Z_RegisterPropertyGroupArgs{name} + _returns := &Z_RegisterPropertyGroupReturns{} + if err := g.client.Call("Plugin.RegisterPropertyGroup", _args, _returns); err != nil { + log.Printf("RPC call to RegisterPropertyGroup API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) RegisterPropertyGroup(args *Z_RegisterPropertyGroupArgs, returns *Z_RegisterPropertyGroupReturns) error { + if hook, ok := s.impl.(interface { + RegisterPropertyGroup(name string) (*model.PropertyGroup, error) + }); ok { + returns.A, returns.B = hook.RegisterPropertyGroup(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API RegisterPropertyGroup called but not implemented.")) + } + return nil +} + +type Z_GetPropertyGroupArgs struct { + A string +} + +type Z_GetPropertyGroupReturns 
struct { + A *model.PropertyGroup + B error +} + +func (g *apiRPCClient) GetPropertyGroup(name string) (*model.PropertyGroup, error) { + _args := &Z_GetPropertyGroupArgs{name} + _returns := &Z_GetPropertyGroupReturns{} + if err := g.client.Call("Plugin.GetPropertyGroup", _args, _returns); err != nil { + log.Printf("RPC call to GetPropertyGroup API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPropertyGroup(args *Z_GetPropertyGroupArgs, returns *Z_GetPropertyGroupReturns) error { + if hook, ok := s.impl.(interface { + GetPropertyGroup(name string) (*model.PropertyGroup, error) + }); ok { + returns.A, returns.B = hook.GetPropertyGroup(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API GetPropertyGroup called but not implemented.")) + } + return nil +} + +type Z_GetPropertyFieldByNameArgs struct { + A string + B string + C string +} + +type Z_GetPropertyFieldByNameReturns struct { + A *model.PropertyField + B error +} + +func (g *apiRPCClient) GetPropertyFieldByName(groupID, targetID, name string) (*model.PropertyField, error) { + _args := &Z_GetPropertyFieldByNameArgs{groupID, targetID, name} + _returns := &Z_GetPropertyFieldByNameReturns{} + if err := g.client.Call("Plugin.GetPropertyFieldByName", _args, _returns); err != nil { + log.Printf("RPC call to GetPropertyFieldByName API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) GetPropertyFieldByName(args *Z_GetPropertyFieldByNameArgs, returns *Z_GetPropertyFieldByNameReturns) error { + if hook, ok := s.impl.(interface { + GetPropertyFieldByName(groupID, targetID, name string) (*model.PropertyField, error) + }); ok { + returns.A, returns.B = hook.GetPropertyFieldByName(args.A, args.B, args.C) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API GetPropertyFieldByName called but not implemented.")) + } + return nil +} + +type 
Z_UpdatePropertyFieldsArgs struct { + A string + B []*model.PropertyField +} + +type Z_UpdatePropertyFieldsReturns struct { + A []*model.PropertyField + B error +} + +func (g *apiRPCClient) UpdatePropertyFields(groupID string, fields []*model.PropertyField) ([]*model.PropertyField, error) { + _args := &Z_UpdatePropertyFieldsArgs{groupID, fields} + _returns := &Z_UpdatePropertyFieldsReturns{} + if err := g.client.Call("Plugin.UpdatePropertyFields", _args, _returns); err != nil { + log.Printf("RPC call to UpdatePropertyFields API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdatePropertyFields(args *Z_UpdatePropertyFieldsArgs, returns *Z_UpdatePropertyFieldsReturns) error { + if hook, ok := s.impl.(interface { + UpdatePropertyFields(groupID string, fields []*model.PropertyField) ([]*model.PropertyField, error) + }); ok { + returns.A, returns.B = hook.UpdatePropertyFields(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API UpdatePropertyFields called but not implemented.")) + } + return nil +} + +type Z_UpdatePropertyValuesArgs struct { + A string + B []*model.PropertyValue +} + +type Z_UpdatePropertyValuesReturns struct { + A []*model.PropertyValue + B error +} + +func (g *apiRPCClient) UpdatePropertyValues(groupID string, values []*model.PropertyValue) ([]*model.PropertyValue, error) { + _args := &Z_UpdatePropertyValuesArgs{groupID, values} + _returns := &Z_UpdatePropertyValuesReturns{} + if err := g.client.Call("Plugin.UpdatePropertyValues", _args, _returns); err != nil { + log.Printf("RPC call to UpdatePropertyValues API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpdatePropertyValues(args *Z_UpdatePropertyValuesArgs, returns *Z_UpdatePropertyValuesReturns) error { + if hook, ok := s.impl.(interface { + UpdatePropertyValues(groupID string, values []*model.PropertyValue) ([]*model.PropertyValue, error) + }); ok { + 
returns.A, returns.B = hook.UpdatePropertyValues(args.A, args.B) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API UpdatePropertyValues called but not implemented.")) + } + return nil +} + +type Z_UpsertPropertyValuesArgs struct { + A []*model.PropertyValue +} + +type Z_UpsertPropertyValuesReturns struct { + A []*model.PropertyValue + B error +} + +func (g *apiRPCClient) UpsertPropertyValues(values []*model.PropertyValue) ([]*model.PropertyValue, error) { + _args := &Z_UpsertPropertyValuesArgs{values} + _returns := &Z_UpsertPropertyValuesReturns{} + if err := g.client.Call("Plugin.UpsertPropertyValues", _args, _returns); err != nil { + log.Printf("RPC call to UpsertPropertyValues API failed: %s", err.Error()) + } + return _returns.A, _returns.B +} + +func (s *apiRPCServer) UpsertPropertyValues(args *Z_UpsertPropertyValuesArgs, returns *Z_UpsertPropertyValuesReturns) error { + if hook, ok := s.impl.(interface { + UpsertPropertyValues(values []*model.PropertyValue) ([]*model.PropertyValue, error) + }); ok { + returns.A, returns.B = hook.UpsertPropertyValues(args.A) + returns.B = encodableError(returns.B) + } else { + return encodableError(fmt.Errorf("API UpsertPropertyValues called but not implemented.")) + } + return nil +} + +type Z_DeletePropertyValuesForTargetArgs struct { + A string + B string + C string +} + +type Z_DeletePropertyValuesForTargetReturns struct { + A error +} + +func (g *apiRPCClient) DeletePropertyValuesForTarget(groupID, targetType, targetID string) error { + _args := &Z_DeletePropertyValuesForTargetArgs{groupID, targetType, targetID} + _returns := &Z_DeletePropertyValuesForTargetReturns{} + if err := g.client.Call("Plugin.DeletePropertyValuesForTarget", _args, _returns); err != nil { + log.Printf("RPC call to DeletePropertyValuesForTarget API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeletePropertyValuesForTarget(args *Z_DeletePropertyValuesForTargetArgs, returns 
*Z_DeletePropertyValuesForTargetReturns) error { + if hook, ok := s.impl.(interface { + DeletePropertyValuesForTarget(groupID, targetType, targetID string) error + }); ok { + returns.A = hook.DeletePropertyValuesForTarget(args.A, args.B, args.C) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API DeletePropertyValuesForTarget called but not implemented.")) + } + return nil +} + +type Z_DeletePropertyValuesForFieldArgs struct { + A string + B string +} + +type Z_DeletePropertyValuesForFieldReturns struct { + A error +} + +func (g *apiRPCClient) DeletePropertyValuesForField(groupID, fieldID string) error { + _args := &Z_DeletePropertyValuesForFieldArgs{groupID, fieldID} + _returns := &Z_DeletePropertyValuesForFieldReturns{} + if err := g.client.Call("Plugin.DeletePropertyValuesForField", _args, _returns); err != nil { + log.Printf("RPC call to DeletePropertyValuesForField API failed: %s", err.Error()) + } + return _returns.A +} + +func (s *apiRPCServer) DeletePropertyValuesForField(args *Z_DeletePropertyValuesForFieldArgs, returns *Z_DeletePropertyValuesForFieldReturns) error { + if hook, ok := s.impl.(interface { + DeletePropertyValuesForField(groupID, fieldID string) error + }); ok { + returns.A = hook.DeletePropertyValuesForField(args.A, args.B) + returns.A = encodableError(returns.A) + } else { + return encodableError(fmt.Errorf("API DeletePropertyValuesForField called but not implemented.")) + } + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/context.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/context.go new file mode 100644 index 00000000..bfd3c2e5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/context.go @@ -0,0 +1,15 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +// Context passes through metadata about the request or hook event. 
+// For requests this is built in app/plugin_requests.go +// For hooks, app.PluginContext() is called. +type Context struct { + SessionId string + RequestId string + IPAddress string + AcceptLanguage string + UserAgent string +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/db_rpc.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/db_rpc.go new file mode 100644 index 00000000..7c88406e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/db_rpc.go @@ -0,0 +1,459 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "database/sql/driver" + "log" + "net/rpc" +) + +// dbRPCClient contains the client-side logic to handle the RPC communication +// with the server. It's API is hand-written because we do not expect +// new methods to be added very frequently. +type dbRPCClient struct { + client *rpc.Client +} + +// dbRPCServer is the server-side component which is responsible for calling +// the driver methods and properly encoding the responses back to the RPC client. 
+type dbRPCServer struct { + dbImpl Driver +} + +var _ Driver = &dbRPCClient{} + +type Z_DbStrErrReturn struct { + A string + B error +} + +type Z_DbErrReturn struct { + A error +} + +type Z_DbInt64ErrReturn struct { + A int64 + B error +} + +type Z_DbBoolReturn struct { + A bool +} + +func (db *dbRPCClient) Conn(isMaster bool) (string, error) { + ret := &Z_DbStrErrReturn{} + err := db.client.Call("Plugin.Conn", isMaster, ret) + if err != nil { + log.Printf("error during Plugin.Conn: %v", err) + } + ret.B = decodableError(ret.B) + return ret.A, ret.B +} + +func (db *dbRPCServer) Conn(isMaster bool, ret *Z_DbStrErrReturn) error { + ret.A, ret.B = db.dbImpl.Conn(isMaster) + ret.B = encodableError(ret.B) + return nil +} + +func (db *dbRPCClient) ConnPing(connID string) error { + ret := &Z_DbErrReturn{} + err := db.client.Call("Plugin.ConnPing", connID, ret) + if err != nil { + log.Printf("error during Plugin.ConnPing: %v", err) + } + ret.A = decodableError(ret.A) + return ret.A +} + +func (db *dbRPCServer) ConnPing(connID string, ret *Z_DbErrReturn) error { + ret.A = db.dbImpl.ConnPing(connID) + ret.A = encodableError(ret.A) + return nil +} + +func (db *dbRPCClient) ConnClose(connID string) error { + ret := &Z_DbErrReturn{} + err := db.client.Call("Plugin.ConnClose", connID, ret) + if err != nil { + log.Printf("error during Plugin.ConnClose: %v", err) + } + ret.A = decodableError(ret.A) + return ret.A +} + +func (db *dbRPCServer) ConnClose(connID string, ret *Z_DbErrReturn) error { + ret.A = db.dbImpl.ConnClose(connID) + ret.A = encodableError(ret.A) + return nil +} + +type Z_DbTxArgs struct { + A string + B driver.TxOptions +} + +func (db *dbRPCClient) Tx(connID string, opts driver.TxOptions) (string, error) { + args := &Z_DbTxArgs{ + A: connID, + B: opts, + } + ret := &Z_DbStrErrReturn{} + err := db.client.Call("Plugin.Tx", args, ret) + if err != nil { + log.Printf("error during Plugin.Tx: %v", err) + } + ret.B = decodableError(ret.B) + return ret.A, ret.B +} + 
+func (db *dbRPCServer) Tx(args *Z_DbTxArgs, ret *Z_DbStrErrReturn) error { + ret.A, ret.B = db.dbImpl.Tx(args.A, args.B) + ret.B = encodableError(ret.B) + return nil +} + +func (db *dbRPCClient) TxCommit(txID string) error { + ret := &Z_DbErrReturn{} + err := db.client.Call("Plugin.TxCommit", txID, ret) + if err != nil { + log.Printf("error during Plugin.TxCommit: %v", err) + } + ret.A = decodableError(ret.A) + return ret.A +} + +func (db *dbRPCServer) TxCommit(txID string, ret *Z_DbErrReturn) error { + ret.A = db.dbImpl.TxCommit(txID) + ret.A = encodableError(ret.A) + return nil +} + +func (db *dbRPCClient) TxRollback(txID string) error { + ret := &Z_DbErrReturn{} + err := db.client.Call("Plugin.TxRollback", txID, ret) + if err != nil { + log.Printf("error during Plugin.TxRollback: %v", err) + } + ret.A = decodableError(ret.A) + return ret.A +} + +func (db *dbRPCServer) TxRollback(txID string, ret *Z_DbErrReturn) error { + ret.A = db.dbImpl.TxRollback(txID) + ret.A = encodableError(ret.A) + return nil +} + +type Z_DbStmtArgs struct { + A string + B string +} + +func (db *dbRPCClient) Stmt(connID, q string) (string, error) { + args := &Z_DbStmtArgs{ + A: connID, + B: q, + } + ret := &Z_DbStrErrReturn{} + err := db.client.Call("Plugin.Stmt", args, ret) + if err != nil { + log.Printf("error during Plugin.Stmt: %v", err) + } + ret.B = decodableError(ret.B) + return ret.A, ret.B +} + +func (db *dbRPCServer) Stmt(args *Z_DbStmtArgs, ret *Z_DbStrErrReturn) error { + ret.A, ret.B = db.dbImpl.Stmt(args.A, args.B) + ret.B = encodableError(ret.B) + return nil +} + +func (db *dbRPCClient) StmtClose(stID string) error { + ret := &Z_DbErrReturn{} + err := db.client.Call("Plugin.StmtClose", stID, ret) + if err != nil { + log.Printf("error during Plugin.StmtClose: %v", err) + } + ret.A = decodableError(ret.A) + return ret.A +} + +func (db *dbRPCServer) StmtClose(stID string, ret *Z_DbErrReturn) error { + ret.A = db.dbImpl.StmtClose(stID) + ret.A = encodableError(ret.A) + return 
nil +} + +type Z_DbIntReturn struct { + A int +} + +func (db *dbRPCClient) StmtNumInput(stID string) int { + ret := &Z_DbIntReturn{} + err := db.client.Call("Plugin.StmtNumInput", stID, ret) + if err != nil { + log.Printf("error during Plugin.StmtNumInput: %v", err) + } + return ret.A +} + +func (db *dbRPCServer) StmtNumInput(stID string, ret *Z_DbIntReturn) error { + ret.A = db.dbImpl.StmtNumInput(stID) + return nil +} + +type Z_DbStmtQueryArgs struct { + A string + B []driver.NamedValue +} + +func (db *dbRPCClient) StmtQuery(stID string, argVals []driver.NamedValue) (string, error) { + args := &Z_DbStmtQueryArgs{ + A: stID, + B: argVals, + } + ret := &Z_DbStrErrReturn{} + err := db.client.Call("Plugin.StmtQuery", args, ret) + if err != nil { + log.Printf("error during Plugin.StmtQuery: %v", err) + } + ret.B = decodableError(ret.B) + return ret.A, ret.B +} + +func (db *dbRPCServer) StmtQuery(args *Z_DbStmtQueryArgs, ret *Z_DbStrErrReturn) error { + ret.A, ret.B = db.dbImpl.StmtQuery(args.A, args.B) + ret.B = encodableError(ret.B) + return nil +} + +func (db *dbRPCClient) StmtExec(stID string, argVals []driver.NamedValue) (ResultContainer, error) { + args := &Z_DbStmtQueryArgs{ + A: stID, + B: argVals, + } + ret := &Z_DbResultContErrReturn{} + err := db.client.Call("Plugin.StmtExec", args, ret) + if err != nil { + log.Printf("error during Plugin.StmtExec: %v", err) + } + ret.A.LastIDError = decodableError(ret.A.LastIDError) + ret.A.RowsAffectedError = decodableError(ret.A.RowsAffectedError) + ret.B = decodableError(ret.B) + return ret.A, ret.B +} + +func (db *dbRPCServer) StmtExec(args *Z_DbStmtQueryArgs, ret *Z_DbResultContErrReturn) error { + ret.A, ret.B = db.dbImpl.StmtExec(args.A, args.B) + ret.A.LastIDError = encodableError(ret.A.LastIDError) + ret.A.RowsAffectedError = encodableError(ret.A.RowsAffectedError) + ret.B = encodableError(ret.B) + return nil +} + +type Z_DbConnArgs struct { + A string + B string + C []driver.NamedValue +} + +func (db *dbRPCClient) 
ConnQuery(connID, q string, argVals []driver.NamedValue) (string, error) { + args := &Z_DbConnArgs{ + A: connID, + B: q, + C: argVals, + } + ret := &Z_DbStrErrReturn{} + err := db.client.Call("Plugin.ConnQuery", args, ret) + if err != nil { + log.Printf("error during Plugin.ConnQuery: %v", err) + } + ret.B = decodableError(ret.B) + return ret.A, ret.B +} + +func (db *dbRPCServer) ConnQuery(args *Z_DbConnArgs, ret *Z_DbStrErrReturn) error { + ret.A, ret.B = db.dbImpl.ConnQuery(args.A, args.B, args.C) + ret.B = encodableError(ret.B) + return nil +} + +type Z_DbResultContErrReturn struct { + A ResultContainer + B error +} + +func (db *dbRPCClient) ConnExec(connID, q string, argVals []driver.NamedValue) (ResultContainer, error) { + args := &Z_DbConnArgs{ + A: connID, + B: q, + C: argVals, + } + ret := &Z_DbResultContErrReturn{} + err := db.client.Call("Plugin.ConnExec", args, ret) + if err != nil { + log.Printf("error during Plugin.ConnExec: %v", err) + } + ret.A.LastIDError = decodableError(ret.A.LastIDError) + ret.A.RowsAffectedError = decodableError(ret.A.RowsAffectedError) + ret.B = decodableError(ret.B) + return ret.A, ret.B +} + +func (db *dbRPCServer) ConnExec(args *Z_DbConnArgs, ret *Z_DbResultContErrReturn) error { + ret.A, ret.B = db.dbImpl.ConnExec(args.A, args.B, args.C) + ret.A.LastIDError = encodableError(ret.A.LastIDError) + ret.A.RowsAffectedError = encodableError(ret.A.RowsAffectedError) + ret.B = encodableError(ret.B) + return nil +} + +type Z_DbStrSliceReturn struct { + A []string +} + +func (db *dbRPCClient) RowsColumns(rowsID string) []string { + ret := &Z_DbStrSliceReturn{} + err := db.client.Call("Plugin.RowsColumns", rowsID, ret) + if err != nil { + log.Printf("error during Plugin.RowsColumns: %v", err) + } + return ret.A +} + +func (db *dbRPCServer) RowsColumns(rowsID string, ret *Z_DbStrSliceReturn) error { + ret.A = db.dbImpl.RowsColumns(rowsID) + return nil +} + +func (db *dbRPCClient) RowsClose(resID string) error { + ret := 
&Z_DbErrReturn{} + err := db.client.Call("Plugin.RowsClose", resID, ret) + if err != nil { + log.Printf("error during Plugin.RowsClose: %v", err) + } + ret.A = decodableError(ret.A) + return ret.A +} + +func (db *dbRPCServer) RowsClose(resID string, ret *Z_DbErrReturn) error { + ret.A = db.dbImpl.RowsClose(resID) + ret.A = encodableError(ret.A) + return nil +} + +type Z_DbRowScanReturn struct { + A error + B []driver.Value +} + +type Z_DbRowScanArg struct { + A string + B []driver.Value +} + +func (db *dbRPCClient) RowsNext(rowsID string, dest []driver.Value) error { + args := &Z_DbRowScanArg{ + A: rowsID, + B: dest, + } + ret := &Z_DbRowScanReturn{} + err := db.client.Call("Plugin.RowsNext", args, ret) + if err != nil { + log.Printf("error during Plugin.RowsNext: %v", err) + } + ret.A = decodableError(ret.A) + copy(dest, ret.B) + return ret.A +} + +func (db *dbRPCServer) RowsNext(args *Z_DbRowScanArg, ret *Z_DbRowScanReturn) error { + ret.A = db.dbImpl.RowsNext(args.A, args.B) + ret.A = encodableError(ret.A) + // Trick to populate the dest slice. RPC doesn't have a semantic to populate + // pointer type args. So the only way to pass values is via args, and only way + // to return values is via the return struct. 
+ ret.B = args.B + return nil +} + +func (db *dbRPCClient) RowsHasNextResultSet(rowsID string) bool { + ret := &Z_DbBoolReturn{} + err := db.client.Call("Plugin.RowsHasNextResultSet", rowsID, ret) + if err != nil { + log.Printf("error during Plugin.RowsHasNextResultSet: %v", err) + } + return ret.A +} + +func (db *dbRPCServer) RowsHasNextResultSet(rowsID string, ret *Z_DbBoolReturn) error { + ret.A = db.dbImpl.RowsHasNextResultSet(rowsID) + return nil +} + +func (db *dbRPCClient) RowsNextResultSet(rowsID string) error { + ret := &Z_DbErrReturn{} + err := db.client.Call("Plugin.RowsNextResultSet", rowsID, ret) + if err != nil { + log.Printf("error during Plugin.RowsNextResultSet: %v", err) + } + ret.A = decodableError(ret.A) + return ret.A +} + +func (db *dbRPCServer) RowsNextResultSet(rowsID string, ret *Z_DbErrReturn) error { + ret.A = db.dbImpl.RowsNextResultSet(rowsID) + ret.A = encodableError(ret.A) + return nil +} + +type Z_DbRowsColumnArg struct { + A string + B int +} + +func (db *dbRPCClient) RowsColumnTypeDatabaseTypeName(rowsID string, index int) string { + args := &Z_DbRowsColumnArg{ + A: rowsID, + B: index, + } + var ret string + err := db.client.Call("Plugin.RowsColumnTypeDatabaseTypeName", args, &ret) + if err != nil { + log.Printf("error during Plugin.RowsColumnTypeDatabaseTypeName: %v", err) + } + return ret +} + +func (db *dbRPCServer) RowsColumnTypeDatabaseTypeName(args *Z_DbRowsColumnArg, ret *string) error { + *ret = db.dbImpl.RowsColumnTypeDatabaseTypeName(args.A, args.B) + return nil +} + +type Z_DbRowsColumnTypePrecisionScaleReturn struct { + A int64 + B int64 + C bool +} + +func (db *dbRPCClient) RowsColumnTypePrecisionScale(rowsID string, index int) (int64, int64, bool) { + args := &Z_DbRowsColumnArg{ + A: rowsID, + B: index, + } + ret := &Z_DbRowsColumnTypePrecisionScaleReturn{} + err := db.client.Call("Plugin.RowsColumnTypePrecisionScale", args, ret) + if err != nil { + log.Printf("error during Plugin.RowsColumnTypePrecisionScale: %v", 
err) + } + return ret.A, ret.B, ret.C +} + +func (db *dbRPCServer) RowsColumnTypePrecisionScale(args *Z_DbRowsColumnArg, ret *Z_DbRowsColumnTypePrecisionScaleReturn) error { + ret.A, ret.B, ret.C = db.dbImpl.RowsColumnTypePrecisionScale(args.A, args.B) + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/doc.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/doc.go new file mode 100644 index 00000000..b6806365 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/doc.go @@ -0,0 +1,9 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// The plugin package is used by Mattermost server plugins written in go. It also enables the +// Mattermost server to manage and interact with the running plugin environment. +// +// Note that this package exports a large number of types prefixed with Z_. These are public only +// to allow their use with Hashicorp's go-plugin (and net/rpc). Do not use these directly. +package plugin diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/driver.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/driver.go new file mode 100644 index 00000000..ba5c5d9a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/driver.go @@ -0,0 +1,75 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "database/sql/driver" +) + +// ResultContainer contains the output from the LastInsertID +// and RowsAffected methods for a given set of rows. +// It is used to embed another round-trip to the server, +// and helping to avoid tracking results on the server. 
+type ResultContainer struct { + LastID int64 + LastIDError error + RowsAffected int64 + RowsAffectedError error +} + +// Driver is a sql driver interface that is used by plugins to perform +// raw SQL queries without opening DB connections by themselves. This interface +// is not subject to backward compatibility guarantees and is only meant to be +// used by plugins built by the Mattermost team. +type Driver interface { + // Connection + Conn(isMaster bool) (string, error) + ConnPing(connID string) error + ConnClose(connID string) error + ConnQuery(connID, q string, args []driver.NamedValue) (string, error) // rows + ConnExec(connID, q string, args []driver.NamedValue) (ResultContainer, error) // result + + // Transaction + Tx(connID string, opts driver.TxOptions) (string, error) + TxCommit(txID string) error + TxRollback(txID string) error + + // Statement + Stmt(connID, q string) (string, error) + StmtClose(stID string) error + StmtNumInput(stID string) int + StmtQuery(stID string, args []driver.NamedValue) (string, error) // rows + StmtExec(stID string, args []driver.NamedValue) (ResultContainer, error) // result + + // Rows + RowsColumns(rowsID string) []string + RowsClose(rowsID string) error + RowsNext(rowsID string, dest []driver.Value) error + RowsHasNextResultSet(rowsID string) bool + RowsNextResultSet(rowsID string) error + RowsColumnTypeDatabaseTypeName(rowsID string, index int) string + RowsColumnTypePrecisionScale(rowsID string, index int) (int64, int64, bool) + + // TODO: add this + // RowsColumnScanType(rowsID string, index int) reflect.Type + + // Note: the following cannot be implemented because either MySQL or PG + // does not support it. So this implementation has to be a common subset + // of both DB implementations. 
+ // RowsColumnTypeLength(rowsID string, index int) (int64, bool) + // RowsColumnTypeNullable(rowsID string, index int) (bool, bool) + // ResetSession(ctx context.Context) error + // IsValid() bool +} + +// AppDriver is an extension of the Driver interface to capture non-RPC APIs. +type AppDriver interface { + Driver + + // ConnWithPluginID is only used by the server, and isn't exposed via the RPC API. + ConnWithPluginID(isMaster bool, pluginID string) (string, error) + // This is an extra method needed to shutdown connections + // after a plugin shuts down. + ShutdownConns(pluginID string) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/environment.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/environment.go new file mode 100644 index 00000000..6d25eba6 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/environment.go @@ -0,0 +1,680 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "fmt" + "hash/fnv" + "os" + "path/filepath" + "sync" + "time" + + plugin "github.com/hashicorp/go-plugin" + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/mattermost/mattermost/server/public/utils" +) + +var ErrNotFound = errors.New("Item not found") + +type apiImplCreatorFunc func(*model.Manifest) API + +// registeredPlugin stores the state for a given plugin that has been activated +// or attempted to be activated this server run. +// +// If an installed plugin is missing from the env.registeredPlugins map, then the +// plugin is configured as disabled and has not been activated during this server run. 
+type registeredPlugin struct { + BundleInfo *model.BundleInfo + State int + Error string + + supervisor *supervisor +} + +// PrepackagedPlugin is a plugin prepackaged with the server and found on startup. +type PrepackagedPlugin struct { + Path string + IconData string + Manifest *model.Manifest + SignaturePath string +} + +// Environment represents the execution environment of active plugins. +// +// It is meant for use by the Mattermost server to manipulate, interact with and report on the set +// of active plugins. +type Environment struct { + registeredPlugins sync.Map + pluginHealthCheckJob *PluginHealthCheckJob + logger *mlog.Logger + metrics metricsInterface + newAPIImpl apiImplCreatorFunc + dbDriver AppDriver + pluginDir string + webappPluginDir string + prepackagedPlugins []*PrepackagedPlugin + transitionallyPrepackagedPlugins []*PrepackagedPlugin + prepackagedPluginsLock sync.RWMutex +} + +func NewEnvironment( + newAPIImpl apiImplCreatorFunc, + dbDriver AppDriver, + pluginDir string, + webappPluginDir string, + logger *mlog.Logger, + metrics metricsInterface, +) (*Environment, error) { + return &Environment{ + logger: logger, + metrics: metrics, + newAPIImpl: newAPIImpl, + dbDriver: dbDriver, + pluginDir: pluginDir, + webappPluginDir: webappPluginDir, + }, nil +} + +// Performs a full scan of the given path. +// +// This function will return info for all subdirectories that appear to be plugins (i.e. all +// subdirectories containing plugin manifest files, regardless of whether they could actually be +// parsed). +// +// Plugins are found non-recursively and paths beginning with a dot are always ignored. +func scanSearchPath(path string) ([]*model.BundleInfo, error) { + files, err := os.ReadDir(path) + if err != nil { + return nil, err + } + var ret []*model.BundleInfo + for _, file := range files { + if !file.IsDir() || file.Name()[0] == '.' 
{
+			continue
+		}
+		info := model.BundleInfoForPath(filepath.Join(path, file.Name()))
+		if info.Manifest != nil {
+			ret = append(ret, info)
+		}
+	}
+	return ret, nil
+}
+
+// Returns a list of all plugins within the environment.
+func (env *Environment) Available() ([]*model.BundleInfo, error) {
+	return scanSearchPath(env.pluginDir)
+}
+
+// Returns a list of prepackaged plugins available in the local prepackaged_plugins folder,
+// excluding those in transition out of being prepackaged.
+//
+// The list content is immutable and should not be modified.
+func (env *Environment) PrepackagedPlugins() []*PrepackagedPlugin {
+	env.prepackagedPluginsLock.RLock()
+	defer env.prepackagedPluginsLock.RUnlock()
+
+	return env.prepackagedPlugins
+}
+
+// TransitionallyPrepackagedPlugins returns a list of plugins transitionally prepackaged in the
+// local prepackaged_plugins folder.
+//
+// The list content is immutable and should not be modified.
+func (env *Environment) TransitionallyPrepackagedPlugins() []*PrepackagedPlugin {
+	env.prepackagedPluginsLock.RLock()
+	defer env.prepackagedPluginsLock.RUnlock()
+
+	return env.transitionallyPrepackagedPlugins
+}
+
+// ClearTransitionallyPrepackagedPlugins clears the list of plugins transitionally prepackaged
+// in the local prepackaged_plugins folder.
+func (env *Environment) ClearTransitionallyPrepackagedPlugins() {
+	// This method writes transitionallyPrepackagedPlugins, so it must take the
+	// exclusive (write) lock; an RLock would not exclude concurrent readers.
+	env.prepackagedPluginsLock.Lock()
+	defer env.prepackagedPluginsLock.Unlock()
+
+	env.transitionallyPrepackagedPlugins = nil
+}
+
+// Returns a list of all currently active plugins within the environment.
+// The returned list should not be modified.
+func (env *Environment) Active() []*model.BundleInfo { + activePlugins := []*model.BundleInfo{} + env.registeredPlugins.Range(func(key, value any) bool { + plugin := value.(registeredPlugin) + if env.IsActive(plugin.BundleInfo.Manifest.Id) { + activePlugins = append(activePlugins, plugin.BundleInfo) + } + + return true + }) + + return activePlugins +} + +// IsActive returns true if the plugin with the given id is active. +func (env *Environment) IsActive(id string) bool { + return env.GetPluginState(id) == model.PluginStateRunning +} + +func (env *Environment) SetPluginError(id string, err string) { + if rp, ok := env.registeredPlugins.Load(id); ok { + p := rp.(registeredPlugin) + p.Error = err + env.registeredPlugins.Store(id, p) + } +} + +func (env *Environment) getPluginError(id string) string { + if rp, ok := env.registeredPlugins.Load(id); ok { + return rp.(registeredPlugin).Error + } + + return "" +} + +// GetPluginState returns the current state of a plugin (disabled, running, or error) +func (env *Environment) GetPluginState(id string) int { + rp, ok := env.registeredPlugins.Load(id) + if !ok { + return model.PluginStateNotRunning + } + + return rp.(registeredPlugin).State +} + +// setPluginState sets the current state of a plugin (disabled, running, or error) +func (env *Environment) setPluginState(id string, state int) { + if rp, ok := env.registeredPlugins.Load(id); ok { + p := rp.(registeredPlugin) + p.State = state + env.registeredPlugins.Store(id, p) + } +} + +// setPluginSupervisor records the supervisor for a registered plugin. +func (env *Environment) setPluginSupervisor(id string, supervisor *supervisor) { + if rp, ok := env.registeredPlugins.Load(id); ok { + p := rp.(registeredPlugin) + p.supervisor = supervisor + env.registeredPlugins.Store(id, p) + } +} + +// PublicFilesPath returns a path and true if the plugin with the given id is active. 
+// It returns an empty string and false if the path is not set or invalid +func (env *Environment) PublicFilesPath(id string) (string, error) { + if !env.IsActive(id) { + return "", fmt.Errorf("plugin not found: %v", id) + } + return filepath.Join(env.pluginDir, id, "public"), nil +} + +// Statuses returns a list of plugin statuses representing the state of every plugin +func (env *Environment) Statuses() (model.PluginStatuses, error) { + plugins, err := env.Available() + if err != nil { + return nil, errors.Wrap(err, "unable to get plugin statuses") + } + + pluginStatuses := make(model.PluginStatuses, 0, len(plugins)) + for _, plugin := range plugins { + // For now we don't handle bad manifests, we should + if plugin.Manifest == nil { + continue + } + + pluginState := env.GetPluginState(plugin.Manifest.Id) + + status := &model.PluginStatus{ + PluginId: plugin.Manifest.Id, + PluginPath: filepath.Dir(plugin.ManifestPath), + State: pluginState, + Error: env.getPluginError(plugin.Manifest.Id), + Name: plugin.Manifest.Name, + Description: plugin.Manifest.Description, + Version: plugin.Manifest.Version, + } + + pluginStatuses = append(pluginStatuses, status) + } + + return pluginStatuses, nil +} + +// GetManifest returns a manifest for a given pluginId. +// Returns ErrNotFound if plugin is not found. 
+func (env *Environment) GetManifest(pluginId string) (*model.Manifest, error) { + plugins, err := env.Available() + if err != nil { + return nil, errors.Wrap(err, "unable to get plugin statuses") + } + + for _, plugin := range plugins { + if plugin.Manifest != nil && plugin.Manifest.Id == pluginId { + return plugin.Manifest, nil + } + } + + return nil, ErrNotFound +} + +func checkMinServerVersion(pluginInfo *model.BundleInfo) error { + if pluginInfo.Manifest.MinServerVersion == "" { + return nil + } + + fulfilled, err := pluginInfo.Manifest.MeetMinServerVersion(model.CurrentVersion) + if err != nil { + return fmt.Errorf("%v: %v", err.Error(), pluginInfo.Manifest.Id) + } + if !fulfilled { + return fmt.Errorf("plugin requires Mattermost %v: %v", pluginInfo.Manifest.MinServerVersion, pluginInfo.Manifest.Id) + } + + return nil +} + +func (env *Environment) startPluginServer(pluginInfo *model.BundleInfo, opts ...func(*supervisor, *plugin.ClientConfig) error) error { + sup, err := newSupervisor(pluginInfo, env.newAPIImpl(pluginInfo.Manifest), env.dbDriver, env.logger, env.metrics, opts...) + if err != nil { + return errors.Wrapf(err, "unable to start plugin: %v", pluginInfo.Manifest.Id) + } + + // We pre-emptively set the state to running to prevent re-entrancy issues. + // The plugin's OnActivate hook can in-turn call UpdateConfiguration + // which again calls this method. This method is guarded against multiple calls, + // but fails if it is called recursively. + // + // Therefore, setting the state to running prevents this from happening, + // and in case there is an error, the defer clause will set the proper state anyways. 
+ env.setPluginState(pluginInfo.Manifest.Id, model.PluginStateRunning) + + if err := sup.Hooks().OnActivate(); err != nil { + sup.Shutdown() + return err + } + env.setPluginSupervisor(pluginInfo.Manifest.Id, sup) + + return nil +} + +func (env *Environment) Activate(id string) (manifest *model.Manifest, activated bool, reterr error) { + defer func() { + if reterr != nil { + env.SetPluginError(id, reterr.Error()) + } else { + env.SetPluginError(id, "") + } + }() + + // Check if we are already active + if env.IsActive(id) { + return nil, false, nil + } + + plugins, err := env.Available() + if err != nil { + return nil, false, err + } + var pluginInfo *model.BundleInfo + for _, p := range plugins { + if p.Manifest != nil && p.Manifest.Id == id { + if pluginInfo != nil { + return nil, false, fmt.Errorf("multiple plugins found: %v", id) + } + pluginInfo = p + } + } + if pluginInfo == nil { + return nil, false, fmt.Errorf("plugin not found: %v", id) + } + + rp := newRegisteredPlugin(pluginInfo) + env.registeredPlugins.Store(id, rp) + + defer func() { + if reterr == nil { + env.setPluginState(id, model.PluginStateRunning) + } else { + env.setPluginState(id, model.PluginStateFailedToStart) + } + }() + + err = checkMinServerVersion(pluginInfo) + if err != nil { + return nil, false, err + } + + componentActivated := false + + if pluginInfo.Manifest.HasWebapp() { + var updatedManifest *model.Manifest + updatedManifest, err = env.UnpackWebappBundle(id) + if err != nil { + return nil, false, errors.Wrapf(err, "unable to generate webapp bundle: %v", id) + } + pluginInfo.Manifest.Webapp.BundleHash = updatedManifest.Webapp.BundleHash + + componentActivated = true + } + + if pluginInfo.Manifest.HasServer() { + err = env.startPluginServer(pluginInfo, WithExecutableFromManifest(pluginInfo)) + if err != nil { + return nil, false, err + } + componentActivated = true + } + + if !componentActivated { + return nil, false, fmt.Errorf("unable to start plugin: must at least have a web app or 
server component")
+	}
+
+	mlog.Debug("Plugin activated", mlog.String("plugin_id", pluginInfo.Manifest.Id), mlog.String("version", pluginInfo.Manifest.Version))
+
+	return pluginInfo.Manifest, true, nil
+}
+
+// Reattach allows the server to bind to an existing plugin instance launched elsewhere.
+func (env *Environment) Reattach(manifest *model.Manifest, pluginReattachConfig *model.PluginReattachConfig) (reterr error) {
+	id := manifest.Id
+
+	defer func() {
+		if reterr != nil {
+			env.SetPluginError(id, reterr.Error())
+		} else {
+			env.SetPluginError(id, "")
+		}
+	}()
+
+	// Check if we are already active
+	if env.IsActive(id) {
+		return nil
+	}
+
+	pluginInfo := &model.BundleInfo{
+		Path:          "",
+		Manifest:      manifest,
+		ManifestPath:  "",
+		ManifestError: nil,
+	}
+
+	rp := newRegisteredPlugin(pluginInfo)
+	env.registeredPlugins.Store(id, rp)
+
+	defer func() {
+		if reterr == nil {
+			env.setPluginState(id, model.PluginStateRunning)
+		} else {
+			env.setPluginState(id, model.PluginStateFailedToStart)
+		}
+	}()
+
+	err := checkMinServerVersion(pluginInfo)
+	if err != nil {
+		// Propagate the failure: returning nil here would leave reterr nil, so the
+		// deferred handlers would clear the plugin error and mark the plugin running.
+		return err
+	}
+
+	if !pluginInfo.Manifest.HasServer() {
+		return errors.New("cannot reattach plugin without server component")
+	}
+
+	if pluginInfo.Manifest.HasWebapp() {
+		env.logger.Warn("Ignoring webapp for reattached plugin", mlog.String("plugin_id", id))
+	}
+
+	err = env.startPluginServer(pluginInfo, WithReattachConfig(pluginReattachConfig))
+	if err != nil {
+		// Propagate the failure so the deferred handlers record the error and set
+		// the plugin state to PluginStateFailedToStart instead of PluginStateRunning.
+		return err
+	}
+
+	mlog.Debug("Plugin reattached", mlog.String("plugin_id", pluginInfo.Manifest.Id), mlog.String("version", pluginInfo.Manifest.Version))
+
+	return nil
+}
+
+// RemovePlugin removes the plugin with the given id from the registered plugins.
+func (env *Environment) RemovePlugin(id string) {
+	if _, ok := env.registeredPlugins.Load(id); ok {
+		env.registeredPlugins.Delete(id)
+	}
+}
+
+// Deactivates the plugin with the given id.
+func (env *Environment) Deactivate(id string) bool { + p, ok := env.registeredPlugins.Load(id) + if !ok { + return false + } + + isActive := env.IsActive(id) + + env.setPluginState(id, model.PluginStateNotRunning) + + if !isActive { + return false + } + + rp := p.(registeredPlugin) + if rp.supervisor != nil { + if err := rp.supervisor.Hooks().OnDeactivate(); err != nil { + env.logger.Error("Plugin OnDeactivate() error", mlog.String("plugin_id", rp.BundleInfo.Manifest.Id), mlog.Err(err)) + } + rp.supervisor.Shutdown() + } + + return true +} + +// RestartPlugin deactivates, then activates the plugin with the given id. +func (env *Environment) RestartPlugin(id string) error { + env.Deactivate(id) + _, _, err := env.Activate(id) + return err +} + +// Shutdown deactivates all plugins and gracefully shuts down the environment. +func (env *Environment) Shutdown() { + env.TogglePluginHealthCheckJob(false) + + var wg sync.WaitGroup + env.registeredPlugins.Range(func(_, value any) bool { + rp := value.(registeredPlugin) + + if rp.supervisor == nil || !env.IsActive(rp.BundleInfo.Manifest.Id) { + return true + } + + wg.Add(1) + + done := make(chan bool) + go func() { + defer close(done) + if err := rp.supervisor.Hooks().OnDeactivate(); err != nil { + env.logger.Error("Plugin OnDeactivate() error", mlog.String("plugin_id", rp.BundleInfo.Manifest.Id), mlog.Err(err)) + } + }() + + go func() { + defer wg.Done() + + select { + case <-time.After(10 * time.Second): + env.logger.Warn("Plugin OnDeactivate() failed to complete in 10 seconds", mlog.String("plugin_id", rp.BundleInfo.Manifest.Id)) + case <-done: + } + + rp.supervisor.Shutdown() + }() + + return true + }) + + wg.Wait() + + env.registeredPlugins.Range(func(key, value any) bool { + env.registeredPlugins.Delete(key) + + return true + }) +} + +// UnpackWebappBundle unpacks webapp bundle for a given plugin id on disk. 
+func (env *Environment) UnpackWebappBundle(id string) (*model.Manifest, error) { + plugins, err := env.Available() + if err != nil { + return nil, errors.New("Unable to get available plugins") + } + var manifest *model.Manifest + for _, p := range plugins { + if p.Manifest != nil && p.Manifest.Id == id { + if manifest != nil { + return nil, fmt.Errorf("multiple plugins found: %v", id) + } + manifest = p.Manifest + } + } + if manifest == nil { + return nil, fmt.Errorf("plugin not found: %v", id) + } + + bundlePath := filepath.Clean(manifest.Webapp.BundlePath) + if bundlePath == "" || bundlePath[0] == '.' { + return nil, fmt.Errorf("invalid webapp bundle path") + } + bundlePath = filepath.Join(env.pluginDir, id, bundlePath) + destinationPath := filepath.Join(env.webappPluginDir, id) + + if err = os.RemoveAll(destinationPath); err != nil { + return nil, errors.Wrapf(err, "unable to remove old webapp bundle directory: %v", destinationPath) + } + + if err = utils.CopyDir(filepath.Dir(bundlePath), destinationPath); err != nil { + return nil, errors.Wrapf(err, "unable to copy webapp bundle directory: %v", id) + } + + sourceBundleFilepath := filepath.Join(destinationPath, filepath.Base(bundlePath)) + + sourceBundleFileContents, err := os.ReadFile(sourceBundleFilepath) + if err != nil { + return nil, errors.Wrapf(err, "unable to read webapp bundle: %v", id) + } + + hash := fnv.New64a() + if _, err = hash.Write(sourceBundleFileContents); err != nil { + return nil, errors.Wrapf(err, "unable to generate hash for webapp bundle: %v", id) + } + manifest.Webapp.BundleHash = hash.Sum([]byte{}) + + if err = os.Rename( + sourceBundleFilepath, + filepath.Join(destinationPath, fmt.Sprintf("%s_%x_bundle.js", id, manifest.Webapp.BundleHash)), + ); err != nil { + return nil, errors.Wrapf(err, "unable to rename webapp bundle: %v", id) + } + + return manifest, nil +} + +// HooksForPlugin returns the hooks API for the plugin with the given id. 
+// +// Consider using RunMultiPluginHook instead. +func (env *Environment) HooksForPlugin(id string) (Hooks, error) { + if p, ok := env.registeredPlugins.Load(id); ok { + rp := p.(registeredPlugin) + if rp.supervisor != nil && env.IsActive(id) { + return rp.supervisor.Hooks(), nil + } + } + + return nil, fmt.Errorf("plugin not found: %v", id) +} + +// RunMultiPluginHook invokes hookRunnerFunc for each active plugin that implements the given hookId. +// +// If hookRunnerFunc returns false, iteration will not continue. The iteration order among active +// plugins is not specified. +func (env *Environment) RunMultiPluginHook(hookRunnerFunc func(hooks Hooks, manifest *model.Manifest) bool, hookId int) { + startTime := time.Now() + + env.registeredPlugins.Range(func(key, value any) bool { + rp := value.(registeredPlugin) + + if rp.supervisor == nil || !rp.supervisor.Implements(hookId) || !env.IsActive(rp.BundleInfo.Manifest.Id) { + return true + } + + hookStartTime := time.Now() + result := hookRunnerFunc(rp.supervisor.Hooks(), rp.BundleInfo.Manifest) + + if env.metrics != nil { + elapsedTime := float64(time.Since(hookStartTime)) / float64(time.Second) + env.metrics.ObservePluginMultiHookIterationDuration(rp.BundleInfo.Manifest.Id, elapsedTime) + } + + return result + }) + + if env.metrics != nil { + elapsedTime := float64(time.Since(startTime)) / float64(time.Second) + env.metrics.ObservePluginMultiHookDuration(elapsedTime) + } +} + +// PerformHealthCheck uses the active plugin's supervisor to verify if the plugin has crashed. +func (env *Environment) PerformHealthCheck(id string) error { + p, ok := env.registeredPlugins.Load(id) + if !ok { + return nil + } + rp := p.(registeredPlugin) + + sup := rp.supervisor + if sup == nil { + return nil + } + return sup.PerformHealthCheck() +} + +// SetPrepackagedPlugins saves prepackaged plugins in the environment. 
+func (env *Environment) SetPrepackagedPlugins(plugins, transitionalPlugins []*PrepackagedPlugin) { + env.prepackagedPluginsLock.Lock() + env.prepackagedPlugins = plugins + env.transitionallyPrepackagedPlugins = transitionalPlugins + env.prepackagedPluginsLock.Unlock() +} + +func newRegisteredPlugin(bundle *model.BundleInfo) registeredPlugin { + state := model.PluginStateNotRunning + return registeredPlugin{State: state, BundleInfo: bundle} +} + +// TogglePluginHealthCheckJob starts a new job if one is not running and is set to enabled, or kills an existing one if set to disabled. +func (env *Environment) TogglePluginHealthCheckJob(enable bool) { + // Config is set to enable. No job exists, start a new job. + if enable && env.pluginHealthCheckJob == nil { + mlog.Debug("Enabling plugin health check job", mlog.Duration("interval_s", HealthCheckInterval)) + + job := newPluginHealthCheckJob(env) + env.pluginHealthCheckJob = job + go job.run() + } + + // Config is set to disable. Job exists, kill existing job. + if !enable && env.pluginHealthCheckJob != nil { + mlog.Debug("Disabling plugin health check job") + + env.pluginHealthCheckJob.Cancel() + env.pluginHealthCheckJob = nil + } +} + +// GetPluginHealthCheckJob returns the configured PluginHealthCheckJob, if any. +func (env *Environment) GetPluginHealthCheckJob() *PluginHealthCheckJob { + return env.pluginHealthCheckJob +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/hclog_adapter.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/hclog_adapter.go new file mode 100644 index 00000000..e4f25f4d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/hclog_adapter.go @@ -0,0 +1,135 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package plugin + +import ( + "fmt" + "io" + "log" + "strings" + + "github.com/hashicorp/go-hclog" + + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +type hclogAdapter struct { + wrappedLogger *mlog.Logger + extrasKey string +} + +func (h *hclogAdapter) Log(level hclog.Level, msg string, args ...any) { + switch level { + case hclog.Trace: + h.Trace(msg, args...) + case hclog.Debug: + h.Debug(msg, args...) + case hclog.Info: + h.Info(msg, args...) + case hclog.Warn: + h.Warn(msg, args...) + case hclog.Error: + h.Error(msg, args...) + default: + // For unknown/unexpected log level, treat it as an error so we notice and fix the code. + h.Error(msg, args...) + } +} + +func (h *hclogAdapter) Trace(msg string, args ...any) { + extras := strings.TrimSpace(fmt.Sprint(args...)) + if extras != "" { + h.wrappedLogger.Debug(msg, mlog.String(h.extrasKey, extras)) + } else { + h.wrappedLogger.Debug(msg) + } +} + +func (h *hclogAdapter) Debug(msg string, args ...any) { + extras := strings.TrimSpace(fmt.Sprint(args...)) + if extras != "" { + h.wrappedLogger.Debug(msg, mlog.String(h.extrasKey, extras)) + } else { + h.wrappedLogger.Debug(msg) + } +} + +func (h *hclogAdapter) Info(msg string, args ...any) { + extras := strings.TrimSpace(fmt.Sprint(args...)) + if extras != "" { + h.wrappedLogger.Info(msg, mlog.String(h.extrasKey, extras)) + } else { + h.wrappedLogger.Info(msg) + } +} + +func (h *hclogAdapter) Warn(msg string, args ...any) { + extras := strings.TrimSpace(fmt.Sprint(args...)) + if extras != "" { + h.wrappedLogger.Warn(msg, mlog.String(h.extrasKey, extras)) + } else { + h.wrappedLogger.Warn(msg) + } +} + +func (h *hclogAdapter) Error(msg string, args ...any) { + extras := strings.TrimSpace(fmt.Sprint(args...)) + if extras != "" { + h.wrappedLogger.Error(msg, mlog.String(h.extrasKey, extras)) + } else { + h.wrappedLogger.Error(msg) + } +} + +func (h *hclogAdapter) IsTrace() bool { + return false +} + +func (h *hclogAdapter) IsDebug() bool { + return 
true +} + +func (h *hclogAdapter) IsInfo() bool { + return true +} + +func (h *hclogAdapter) IsWarn() bool { + return true +} + +func (h *hclogAdapter) IsError() bool { + return true +} + +func (h *hclogAdapter) With(args ...any) hclog.Logger { + return h +} + +func (h *hclogAdapter) Named(name string) hclog.Logger { + return h +} + +func (h *hclogAdapter) ResetNamed(name string) hclog.Logger { + return h +} + +func (h *hclogAdapter) StandardLogger(opts *hclog.StandardLoggerOptions) *log.Logger { + return h.wrappedLogger.StdLogger(mlog.LvlInfo) +} + +func (h *hclogAdapter) StandardWriter(opts *hclog.StandardLoggerOptions) io.Writer { + return h.wrappedLogger.StdLogWriter() +} + +func (h *hclogAdapter) SetLevel(hclog.Level) {} + +func (h *hclogAdapter) GetLevel() hclog.Level { return hclog.NoLevel } + +func (h *hclogAdapter) ImpliedArgs() []any { + return []any{} +} + +func (h *hclogAdapter) Name() string { + return "MattermostPluginLogger" +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/health_check.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/health_check.go new file mode 100644 index 00000000..7f24ab0a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/health_check.go @@ -0,0 +1,124 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package plugin + +import ( + "sync" + "time" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +const ( + HealthCheckInterval = 30 * time.Second // How often the health check should run + HealthCheckDeactivationWindow = 60 * time.Minute // How long we wait for num fails to occur before deactivating the plugin + HealthCheckPingFailLimit = 3 // How many times we call RPC ping in a row before it is considered a failure + HealthCheckNumRestartsLimit = 3 // How many times we restart a plugin before we deactivate it +) + +type PluginHealthCheckJob struct { + cancel chan struct{} + cancelled chan struct{} + cancelOnce sync.Once + env *Environment + failureTimestamps sync.Map +} + +// run continuously performs health checks on all active plugins, on a timer. +func (job *PluginHealthCheckJob) run() { + mlog.Debug("Plugin health check job starting.") + defer close(job.cancelled) + + ticker := time.NewTicker(HealthCheckInterval) + defer ticker.Stop() + + for { + select { + case <-ticker.C: + activePlugins := job.env.Active() + for _, plugin := range activePlugins { + job.CheckPlugin(plugin.Manifest.Id) + } + case <-job.cancel: + return + } + } +} + +// CheckPlugin determines the plugin's health status, then handles the error or success case. +// If the plugin passes the health check, do nothing. +// If the plugin fails the health check, the function either restarts or deactivates the plugin, based on the quantity and frequency of its failures. 
+func (job *PluginHealthCheckJob) CheckPlugin(id string) {
+	err := job.env.PerformHealthCheck(id)
+	if err == nil {
+		return
+	}
+
+	mlog.Warn("Health check failed for plugin", mlog.String("id", id), mlog.Err(err))
+	timestamps := job.getStoredTimestamps(id)
+	timestamps = append(timestamps, time.Now())
+
+	if shouldDeactivatePlugin(timestamps) {
+		// Order matters here, must deactivate first and then set plugin state
+		mlog.Debug("Deactivating plugin due to multiple crashes", mlog.String("id", id))
+		job.env.Deactivate(id)
+
+		// Reset timestamp state for this plugin
+		job.failureTimestamps.Delete(id)
+		job.env.setPluginState(id, model.PluginStateFailedToStayRunning)
+	} else {
+		mlog.Debug("Restarting plugin due to failed health check", mlog.String("id", id))
+		if err := job.env.RestartPlugin(id); err != nil {
+			mlog.Error("Failed to restart plugin", mlog.String("id", id), mlog.Err(err))
+		}
+
+		// Store this failure so we can continue to monitor the plugin
+		job.failureTimestamps.Store(id, removeStaleTimestamps(timestamps))
+	}
+}
+
+// getStoredTimestamps returns the stored failure timestamps for a plugin.
+// A plugin with no recorded failures yields an empty (non-nil) slice.
+func (job *PluginHealthCheckJob) getStoredTimestamps(id string) []time.Time {
+	timestamps, ok := job.failureTimestamps.Load(id)
+	if !ok {
+		timestamps = []time.Time{}
+	}
+	return timestamps.([]time.Time)
+}
+
+// newPluginHealthCheckJob constructs a health check job bound to the given
+// environment. The job does not start checking until run() is invoked.
+func newPluginHealthCheckJob(env *Environment) *PluginHealthCheckJob {
+	return &PluginHealthCheckJob{
+		cancel:    make(chan struct{}),
+		cancelled: make(chan struct{}),
+		env:       env,
+	}
+}
+
+// Cancel stops the health check job and blocks until run() has exited.
+// It is safe to call multiple times: cancelOnce guards the channel close.
+func (job *PluginHealthCheckJob) Cancel() {
+	job.cancelOnce.Do(func() {
+		close(job.cancel)
+	})
+	<-job.cancelled
+}
+
+// shouldDeactivatePlugin determines if a plugin needs to be deactivated after the plugin has failed (HealthCheckNumRestartsLimit) times,
+// within the configured time window (HealthCheckDeactivationWindow).
+func shouldDeactivatePlugin(failedTimestamps []time.Time) bool { + if len(failedTimestamps) < HealthCheckNumRestartsLimit { + return false + } + + index := len(failedTimestamps) - HealthCheckNumRestartsLimit + return time.Since(failedTimestamps[index]) <= HealthCheckDeactivationWindow +} + +// removeStaleTimestamps only keeps the last HealthCheckNumRestartsLimit items in timestamps. +func removeStaleTimestamps(timestamps []time.Time) []time.Time { + if len(timestamps) > HealthCheckNumRestartsLimit { + timestamps = timestamps[len(timestamps)-HealthCheckNumRestartsLimit:] + } + + return timestamps +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/hijack.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/hijack.go new file mode 100644 index 00000000..ffe178a3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/hijack.go @@ -0,0 +1,205 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package plugin + +import ( + "bufio" + "errors" + "net" + "net/http" + "net/rpc" + "time" +) + +const ( + hijackedConnReadBufSize = 4096 +) + +var ( + ErrNotHijacked = errors.New("response is not hijacked") + ErrAlreadyHijacked = errors.New("response was already hijacked") + ErrCannotHijack = errors.New("response cannot be hijacked") +) + +func (w *httpResponseWriterRPCServer) HjConnRWRead(b []byte, reply *[]byte) error { + if w.hjr == nil { + return ErrNotHijacked + } + data := make([]byte, len(b)) + n, err := w.hjr.bufrw.Read(data) + if err != nil { + return err + } + *reply = data[:n] + return nil +} + +func (w *httpResponseWriterRPCServer) HjConnRWWrite(b []byte, reply *int) error { + if w.hjr == nil { + return ErrNotHijacked + } + n, err := w.hjr.bufrw.Write(b) + if err != nil { + return err + } + *reply = n + return nil +} + +func (w *httpResponseWriterRPCServer) HjConnRead(size int, reply *[]byte) error { + if w.hjr == nil { + return ErrNotHijacked + } + if len(w.hjr.readBuf) < size { + w.hjr.readBuf = make([]byte, size) + } + n, err := w.hjr.conn.Read(w.hjr.readBuf[:size]) + if err != nil { + return err + } + *reply = w.hjr.readBuf[:n] + return nil +} + +func (w *httpResponseWriterRPCServer) HjConnWrite(b []byte, reply *int) error { + if w.hjr == nil { + return ErrNotHijacked + } + n, err := w.hjr.conn.Write(b) + if err != nil { + return err + } + *reply = n + return nil +} + +func (w *httpResponseWriterRPCServer) HjConnClose(args struct{}, reply *struct{}) error { + if w.hjr == nil { + return ErrNotHijacked + } + return w.hjr.conn.Close() +} + +func (w *httpResponseWriterRPCServer) HjConnSetDeadline(t time.Time, reply *struct{}) error { + if w.hjr == nil { + return ErrNotHijacked + } + return w.hjr.conn.SetDeadline(t) +} + +func (w *httpResponseWriterRPCServer) HjConnSetReadDeadline(t time.Time, reply *struct{}) error { + if w.hjr == nil { + return ErrNotHijacked + } + return w.hjr.conn.SetReadDeadline(t) +} + +func (w *httpResponseWriterRPCServer) 
HjConnSetWriteDeadline(t time.Time, reply *struct{}) error { + if w.hjr == nil { + return ErrNotHijacked + } + return w.hjr.conn.SetWriteDeadline(t) +} + +func (w *httpResponseWriterRPCServer) HijackResponse(args struct{}, reply *struct{}) error { + if w.hjr != nil { + return ErrAlreadyHijacked + } + hj, ok := w.w.(http.Hijacker) + if !ok { + return ErrCannotHijack + } + conn, bufrw, err := hj.Hijack() + if err != nil { + return err + } + + w.hjr = &hijackedResponse{ + conn: conn, + bufrw: bufrw, + readBuf: make([]byte, hijackedConnReadBufSize), + } + return nil +} + +type hijackedConn struct { + client *rpc.Client +} + +type hijackedConnRW struct { + client *rpc.Client +} + +func (w *hijackedConnRW) Read(b []byte) (int, error) { + var data []byte + if err := w.client.Call("Plugin.HjConnRWRead", b, &data); err != nil { + return 0, err + } + copy(b, data) + return len(data), nil +} + +func (w *hijackedConnRW) Write(b []byte) (int, error) { + var n int + if err := w.client.Call("Plugin.HjConnRWWrite", b, &n); err != nil { + return 0, err + } + return n, nil +} + +func (w *hijackedConn) Read(b []byte) (int, error) { + var data []byte + if err := w.client.Call("Plugin.HjConnRead", len(b), &data); err != nil { + return 0, err + } + copy(b, data) + return len(data), nil +} + +func (w *hijackedConn) Write(b []byte) (int, error) { + var n int + if err := w.client.Call("Plugin.HjConnWrite", b, &n); err != nil { + return 0, err + } + return n, nil +} + +func (w *hijackedConn) Close() error { + return w.client.Call("Plugin.HjConnClose", struct{}{}, nil) +} + +func (w *hijackedConn) LocalAddr() net.Addr { + return nil +} + +func (w *hijackedConn) RemoteAddr() net.Addr { + return nil +} + +func (w *hijackedConn) SetDeadline(t time.Time) error { + return w.client.Call("Plugin.HjConnSetDeadline", t, nil) +} + +func (w *hijackedConn) SetReadDeadline(t time.Time) error { + return w.client.Call("Plugin.HjConnSetReadDeadline", t, nil) +} + +func (w *hijackedConn) SetWriteDeadline(t 
time.Time) error { + return w.client.Call("Plugin.HjConnSetWriteDeadline", t, nil) +} + +func (w *httpResponseWriterRPCClient) Hijack() (net.Conn, *bufio.ReadWriter, error) { + c := &hijackedConn{ + client: w.client, + } + rw := &hijackedConnRW{ + client: w.client, + } + + if err := w.client.Call("Plugin.HijackResponse", struct{}{}, nil); err != nil { + return nil, nil, err + } + + return c, bufio.NewReadWriter(bufio.NewReader(rw), bufio.NewWriter(rw)), nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/hooks.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/hooks.go new file mode 100644 index 00000000..53e44dc4 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/hooks.go @@ -0,0 +1,421 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "io" + "net/http" + + saml2 "github.com/mattermost/gosaml2" + "github.com/mattermost/mattermost/server/public/model" +) + +// These assignments are part of the wire protocol used to trigger hook events in plugins. +// +// Feel free to add more, but do not change existing assignments. Follow the naming convention of +// ID as the autogenerated glue code depends on that. 
+const ( + OnActivateID = 0 + OnDeactivateID = 1 + ServeHTTPID = 2 + OnConfigurationChangeID = 3 + ExecuteCommandID = 4 + MessageWillBePostedID = 5 + MessageWillBeUpdatedID = 6 + MessageHasBeenPostedID = 7 + MessageHasBeenUpdatedID = 8 + UserHasJoinedChannelID = 9 + UserHasLeftChannelID = 10 + UserHasJoinedTeamID = 11 + UserHasLeftTeamID = 12 + ChannelHasBeenCreatedID = 13 + FileWillBeUploadedID = 14 + UserWillLogInID = 15 + UserHasLoggedInID = 16 + UserHasBeenCreatedID = 17 + ReactionHasBeenAddedID = 18 + ReactionHasBeenRemovedID = 19 + OnPluginClusterEventID = 20 + OnWebSocketConnectID = 21 + OnWebSocketDisconnectID = 22 + WebSocketMessageHasBeenPostedID = 23 + RunDataRetentionID = 24 + OnInstallID = 25 + OnSendDailyTelemetryID = 26 + OnCloudLimitsUpdatedID = 27 + deprecatedUserHasPermissionToCollectionID = 28 + deprecatedGetAllUserIdsForCollectionID = 29 + deprecatedGetAllCollectionIDsForUserID = 30 + deprecatedGetTopicRedirectID = 31 + deprecatedGetCollectionMetadataByIdsID = 32 + deprecatedGetTopicMetadataByIdsID = 33 + ConfigurationWillBeSavedID = 34 + NotificationWillBePushedID = 35 + UserHasBeenDeactivatedID = 36 + MessageHasBeenDeletedID = 37 + MessagesWillBeConsumedID = 38 + ServeMetricsID = 39 + OnSharedChannelsSyncMsgID = 40 + OnSharedChannelsPingID = 41 + PreferencesHaveChangedID = 42 + OnSharedChannelsAttachmentSyncMsgID = 43 + OnSharedChannelsProfileImageSyncMsgID = 44 + GenerateSupportDataID = 45 + OnSAMLLoginID = 46 + EmailNotificationWillBeSentID = 47 + TotalHooksID = iota +) + +const ( + // DismissPostError dismisses a pending post when the error is returned from MessageWillBePosted. + DismissPostError = "plugin.message_will_be_posted.dismiss_post" +) + +// Hooks describes the methods a plugin may implement to automatically receive the corresponding +// event. +// +// A plugin only need implement the hooks it cares about. The MattermostPlugin provides some +// default implementations for convenience but may be overridden. 
+type Hooks interface { + // OnActivate is invoked when the plugin is activated. If an error is returned, the plugin + // will be terminated. The plugin will not receive hooks until after OnActivate returns + // without error. OnConfigurationChange will be called once before OnActivate. + // + // Minimum server version: 5.2 + OnActivate() error + + // Implemented returns a list of hooks that are implemented by the plugin. + // Plugins do not need to provide an implementation. Any given will be ignored. + // + // Minimum server version: 5.2 + Implemented() ([]string, error) + + // OnDeactivate is invoked when the plugin is deactivated. This is the plugin's last chance to + // use the API, and the plugin will be terminated shortly after this invocation. The plugin + // will stop receiving hooks just prior to this method being called. + // + // Minimum server version: 5.2 + OnDeactivate() error + + // OnConfigurationChange is invoked when configuration changes may have been made. Any + // returned error is logged, but does not stop the plugin. You must be prepared to handle + // a configuration failure gracefully. It is called once before OnActivate. + // + // Minimum server version: 5.2 + OnConfigurationChange() error + + // ServeHTTP allows the plugin to implement the http.Handler interface. Requests destined for + // the /plugins/{id} path will be routed to the plugin. + // + // The Mattermost-User-Id header will be present if (and only if) the request is by an + // authenticated user. + // + // Minimum server version: 5.2 + ServeHTTP(c *Context, w http.ResponseWriter, r *http.Request) + + // ExecuteCommand executes a command that has been previously registered via the RegisterCommand + // API. + // + // Minimum server version: 5.2 + ExecuteCommand(c *Context, args *model.CommandArgs) (*model.CommandResponse, *model.AppError) + + // UserHasBeenCreated is invoked after a user was created. 
+ // + // Minimum server version: 5.10 + UserHasBeenCreated(c *Context, user *model.User) + + // UserWillLogIn before the login of the user is returned. Returning a non empty string will reject the login event. + // If you don't need to reject the login event, see UserHasLoggedIn + // + // Minimum server version: 5.2 + UserWillLogIn(c *Context, user *model.User) string + + // UserHasLoggedIn is invoked after a user has logged in. + // + // Minimum server version: 5.2 + UserHasLoggedIn(c *Context, user *model.User) + + // MessageWillBePosted is invoked when a message is posted by a user before it is committed + // to the database. If you also want to act on edited posts, see MessageWillBeUpdated. + // + // To reject a post, return an non-empty string describing why the post was rejected. + // To modify the post, return the replacement, non-nil *model.Post and an empty string. + // To allow the post without modification, return a nil *model.Post and an empty string. + // To dismiss the post, return a nil *model.Post and the const DismissPostError string. + // + // If you don't need to modify or reject posts, use MessageHasBeenPosted instead. + // + // Note that this method will be called for posts created by plugins, including the plugin that + // created the post. + // + // Minimum server version: 5.2 + MessageWillBePosted(c *Context, post *model.Post) (*model.Post, string) + + // MessageWillBeUpdated is invoked when a message is updated by a user before it is committed + // to the database. If you also want to act on new posts, see MessageWillBePosted. + // Return values should be the modified post or nil if rejected and an explanation for the user. + // On rejection, the post will be kept in its previous state. + // + // If you don't need to modify or rejected updated posts, use MessageHasBeenUpdated instead. + // + // Note that this method will be called for posts updated by plugins, including the plugin that + // updated the post. 
+ // + // Minimum server version: 5.2 + MessageWillBeUpdated(c *Context, newPost, oldPost *model.Post) (*model.Post, string) + + // MessageHasBeenPosted is invoked after the message has been committed to the database. + // If you need to modify or reject the post, see MessageWillBePosted + // Note that this method will be called for posts created by plugins, including the plugin that + // created the post. + // + // Minimum server version: 5.2 + MessageHasBeenPosted(c *Context, post *model.Post) + + // MessageHasBeenUpdated is invoked after a message is updated and has been updated in the database. + // If you need to modify or reject the post, see MessageWillBeUpdated + // Note that this method will be called for posts created by plugins, including the plugin that + // created the post. + // + // Minimum server version: 5.2 + MessageHasBeenUpdated(c *Context, newPost, oldPost *model.Post) + + // MessagesWillBeConsumed is invoked when a message is requested by a client before it is returned + // to the client + // + // Note that this method will be called for posts created by plugins, including the plugin that + // created the post. + // + // Minimum server version: 9.3 + MessagesWillBeConsumed(posts []*model.Post) []*model.Post + + // MessageHasBeenDeleted is invoked after the message has been deleted from the database. + // Note that this method will be called for posts deleted by plugins, including the plugin that + // deleted the post. + // + // Minimum server version: 9.1 + MessageHasBeenDeleted(c *Context, post *model.Post) + + // ChannelHasBeenCreated is invoked after the channel has been committed to the database. + // + // Minimum server version: 5.2 + ChannelHasBeenCreated(c *Context, channel *model.Channel) + + // UserHasJoinedChannel is invoked after the membership has been committed to the database. + // If actor is not nil, the user was invited to the channel by the actor. 
+ // + // Minimum server version: 5.2 + UserHasJoinedChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) + + // UserHasLeftChannel is invoked after the membership has been removed from the database. + // If actor is not nil, the user was removed from the channel by the actor. + // + // Minimum server version: 5.2 + UserHasLeftChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) + + // UserHasJoinedTeam is invoked after the membership has been committed to the database. + // If actor is not nil, the user was added to the team by the actor. + // + // Minimum server version: 5.2 + UserHasJoinedTeam(c *Context, teamMember *model.TeamMember, actor *model.User) + + // UserHasLeftTeam is invoked after the membership has been removed from the database. + // If actor is not nil, the user was removed from the team by the actor. + // + // Minimum server version: 5.2 + UserHasLeftTeam(c *Context, teamMember *model.TeamMember, actor *model.User) + + // FileWillBeUploaded is invoked when a file is uploaded, but before it is committed to backing store. + // Read from file to retrieve the body of the uploaded file. + // + // To reject a file upload, return an non-empty string describing why the file was rejected. + // To modify the file, write to the output and/or return a non-nil *model.FileInfo, as well as an empty string. + // To allow the file without modification, do not write to the output and return a nil *model.FileInfo and an empty string. + // + // Note that this method will be called for files uploaded by plugins, including the plugin that uploaded the post. + // FileInfo.Size will be automatically set properly if you modify the file. + // + // Minimum server version: 5.2 + FileWillBeUploaded(c *Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) + + // ReactionHasBeenAdded is invoked after the reaction has been committed to the database. 
+ // + // Note that this method will be called for reactions added by plugins, including the plugin that + // added the reaction. + // + // Minimum server version: 5.30 + ReactionHasBeenAdded(c *Context, reaction *model.Reaction) + + // ReactionHasBeenRemoved is invoked after the removal of the reaction has been committed to the database. + // + // Note that this method will be called for reactions removed by plugins, including the plugin that + // removed the reaction. + // + // Minimum server version: 5.30 + ReactionHasBeenRemoved(c *Context, reaction *model.Reaction) + + // OnPluginClusterEvent is invoked when an intra-cluster plugin event is received. + // + // This is used to allow communication between multiple instances of the same plugin + // that are running on separate nodes of the same High-Availability cluster. + // This hook receives events sent by a call to PublishPluginClusterEvent. + // + // Minimum server version: 5.36 + OnPluginClusterEvent(c *Context, ev model.PluginClusterEvent) + + // OnWebSocketConnect is invoked when a new websocket connection is opened. + // + // This is used to track which users have connections opened with the Mattermost + // websocket. + // + // Minimum server version: 6.0 + OnWebSocketConnect(webConnID, userID string) + + // OnWebSocketDisconnect is invoked when a websocket connection is closed. + // + // This is used to track which users have connections opened with the Mattermost + // websocket. + // + // Minimum server version: 6.0 + OnWebSocketDisconnect(webConnID, userID string) + + // WebSocketMessageHasBeenPosted is invoked when a websocket message is received. + // + // Minimum server version: 6.0 + WebSocketMessageHasBeenPosted(webConnID, userID string, req *model.WebSocketRequest) + + // RunDataRetention is invoked during a DataRetentionJob. 
+ // + // Minimum server version: 6.4 + RunDataRetention(nowTime, batchSize int64) (int64, error) + + // OnInstall is invoked after the installation of a plugin as part of the onboarding. + // It's called on every installation, not only once. + // + // In the future, other plugin installation methods will trigger this hook, e.g. an installation via the Marketplace. + // + // Minimum server version: 6.5 + OnInstall(c *Context, event model.OnInstallEvent) error + + // OnSendDailyTelemetry is invoked when the server send the daily telemetry data. + // + // Minimum server version: 6.5 + OnSendDailyTelemetry() + + // OnCloudLimitsUpdated is invoked product limits change, for example when plan tiers change + // + // Minimum server version: 7.0 + OnCloudLimitsUpdated(limits *model.ProductLimits) + + // ConfigurationWillBeSaved is invoked before saving the configuration to the + // backing store. + // An error can be returned to reject the operation. Additionally, a new + // config object can be returned to be stored in place of the provided one. + // Minimum server version: 8.0 + ConfigurationWillBeSaved(newCfg *model.Config) (*model.Config, error) + + // EmailNotificationWillBeSent is invoked before an email notification is sent to a user. + // This allows plugins to customize the email notification content including subject, + // title, subtitle, message content, buttons, and other email properties. + // + // To reject an email notification, return an non-empty string describing why the notification was rejected. + // To modify the notification, return the replacement, non-nil *model.EmailNotificationContent and an empty string. + // To allow the notification without modification, return a nil *model.EmailNotificationContent and an empty string. + // + // Note that core identifiers (PostId, ChannelId, TeamId, SenderId, RecipientId, RootId) and + // context fields (ChannelType, IsDirectMessage, etc.) are immutable and changes to them will be ignored. 
+ // Only customizable content fields can be modified. + // + // Minimum server version: 11.00 + EmailNotificationWillBeSent(emailNotification *model.EmailNotification) (*model.EmailNotificationContent, string) + + // NotificationWillBePushed is invoked before a push notification is sent to the push + // notification server. + // + // To reject a notification, return an non-empty string describing why the notification was rejected. + // To modify the notification, return the replacement, non-nil *model.PushNotification and an empty string. + // To allow the notification without modification, return a nil *model.PushNotification and an empty string. + // + // Note that this method will be called for push notifications created by plugins, including the plugin that + // created the notification. + // + // Minimum server version: 9.0 + NotificationWillBePushed(pushNotification *model.PushNotification, userID string) (*model.PushNotification, string) + + // UserHasBeenDeactivated is invoked when a user is deactivated. + // + // Minimum server version: 9.1 + UserHasBeenDeactivated(c *Context, user *model.User) + + // ServeMetrics allows plugins to expose their own metrics endpoint through + // the server's metrics HTTP listener (e.g. "localhost:8067"). + // Requests destined to the /plugins/{id}/metrics path will be routed to the plugin. + // + // Minimum server version: 9.2 + ServeMetrics(c *Context, w http.ResponseWriter, r *http.Request) + + // OnSharedChannelsSyncMsg is invoked for plugins that wish to receive synchronization messages from the + // Shared Channels service for which they have been invited via InviteRemote. Each SyncMsg may contain + // multiple updates (posts, reactions, attachments, users) for a single channel. + // + // The cursor will be advanced based on the SyncResponse returned. 
+ // + // Minimum server version: 9.5 + OnSharedChannelsSyncMsg(msg *model.SyncMsg, rc *model.RemoteCluster) (model.SyncResponse, error) + + // OnSharedChannelsPing is invoked for plugins to indicate the health of the plugin and the connection + // to the upstream service (e.g. MS Graph APIs). + // + // Return true to indicate all is well. + // + // Return false to indicate there is a problem with the plugin or connection to upstream service. + // Some number of failed pings will result in the plugin being marked offline and it will stop receiving + // OnSharedChannelsSyncMsg calls until it comes back online. The plugin will also appear offline in the status + // report via the `secure-connection status` slash command. + // + // Minimum server version: 9.5 + OnSharedChannelsPing(rc *model.RemoteCluster) bool + + // PreferencesHaveChanged is invoked after one or more of a user's preferences have changed. + // Note that this method will be called for preferences changed by plugins, including the plugin that changed + // the preferences. + // + // Minimum server version: 9.5 + PreferencesHaveChanged(c *Context, preferences []model.Preference) + + // OnSharedChannelsAttachmentSyncMsg is invoked for plugins that wish to receive synchronization messages from the + // Shared Channels service for which they have been invited via InviteRemote. Each call represents one file attachment + // to be synchronized. + // + // The cursor will be advanced based on the timestamp returned if no error is returned. + // + // Minimum server version: 9.5 + OnSharedChannelsAttachmentSyncMsg(fi *model.FileInfo, post *model.Post, rc *model.RemoteCluster) error + + // OnSharedChannelsProfileImageSyncMsg is invoked for plugins that wish to receive synchronization messages from the + // Shared Channels service for which they have been invited via InviteRemote. Each call represents one user profile + // image that should be synchronized. `App.GetProfileImage` can be used to fetch the image bytes. 
+ // + // The cursor will be advanced based on the timestamp returned if no error is returned. + // + // Minimum server version: 9.5 + OnSharedChannelsProfileImageSyncMsg(user *model.User, rc *model.RemoteCluster) error + + // GenerateSupportData is invoked when a Support Packet gets generated. + // It allows plugins to include their own content in the Support Packet. + // + // Plugins may specififes a "support_packet" field in the manifest props with a custom text. + // By doing so, the plugin will be included in the Support Packet UI and the user will be able to select it. + // This hook will only be called, if the user selects the plugin in the Support Packet UI. + // + // If no "support_packet" is specified, this hook will always be called. + // + // Minimum server version: 9.8 + GenerateSupportData(c *Context) ([]*model.FileData, error) + + // OnSAMLLogin is invoked after a successful SAML login. + // + // Minimum server version: 10.7 + OnSAMLLogin(c *Context, user *model.User, assertion *saml2.AssertionInfo) error +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/hooks_timer_layer_generated.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/hooks_timer_layer_generated.go new file mode 100644 index 00000000..ded4d9e3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/hooks_timer_layer_generated.go @@ -0,0 +1,308 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +// Code generated by "make pluginapi" +// DO NOT EDIT + +package plugin + +import ( + "io" + "net/http" + timePkg "time" + + saml2 "github.com/mattermost/gosaml2" + "github.com/mattermost/mattermost/server/public/model" +) + +type hooksTimerLayer struct { + pluginID string + hooksImpl Hooks + metrics metricsInterface +} + +func (hooks *hooksTimerLayer) recordTime(startTime timePkg.Time, name string, success bool) { + if hooks.metrics != nil { + elapsedTime := float64(timePkg.Since(startTime)) / float64(timePkg.Second) + hooks.metrics.ObservePluginHookDuration(hooks.pluginID, name, success, elapsedTime) + } +} + +func (hooks *hooksTimerLayer) OnActivate() error { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.OnActivate() + hooks.recordTime(startTime, "OnActivate", _returnsA == nil) + return _returnsA +} + +func (hooks *hooksTimerLayer) Implemented() ([]string, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.Implemented() + hooks.recordTime(startTime, "Implemented", _returnsB == nil) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) OnDeactivate() error { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.OnDeactivate() + hooks.recordTime(startTime, "OnDeactivate", _returnsA == nil) + return _returnsA +} + +func (hooks *hooksTimerLayer) OnConfigurationChange() error { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.OnConfigurationChange() + hooks.recordTime(startTime, "OnConfigurationChange", _returnsA == nil) + return _returnsA +} + +func (hooks *hooksTimerLayer) ServeHTTP(c *Context, w http.ResponseWriter, r *http.Request) { + startTime := timePkg.Now() + hooks.hooksImpl.ServeHTTP(c, w, r) + hooks.recordTime(startTime, "ServeHTTP", true) +} + +func (hooks *hooksTimerLayer) ExecuteCommand(c *Context, args *model.CommandArgs) (*model.CommandResponse, *model.AppError) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.ExecuteCommand(c, args) + 
hooks.recordTime(startTime, "ExecuteCommand", _returnsB == nil) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) UserHasBeenCreated(c *Context, user *model.User) { + startTime := timePkg.Now() + hooks.hooksImpl.UserHasBeenCreated(c, user) + hooks.recordTime(startTime, "UserHasBeenCreated", true) +} + +func (hooks *hooksTimerLayer) UserWillLogIn(c *Context, user *model.User) string { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.UserWillLogIn(c, user) + hooks.recordTime(startTime, "UserWillLogIn", true) + return _returnsA +} + +func (hooks *hooksTimerLayer) UserHasLoggedIn(c *Context, user *model.User) { + startTime := timePkg.Now() + hooks.hooksImpl.UserHasLoggedIn(c, user) + hooks.recordTime(startTime, "UserHasLoggedIn", true) +} + +func (hooks *hooksTimerLayer) MessageWillBePosted(c *Context, post *model.Post) (*model.Post, string) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.MessageWillBePosted(c, post) + hooks.recordTime(startTime, "MessageWillBePosted", true) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) MessageWillBeUpdated(c *Context, newPost, oldPost *model.Post) (*model.Post, string) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.MessageWillBeUpdated(c, newPost, oldPost) + hooks.recordTime(startTime, "MessageWillBeUpdated", true) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) MessageHasBeenPosted(c *Context, post *model.Post) { + startTime := timePkg.Now() + hooks.hooksImpl.MessageHasBeenPosted(c, post) + hooks.recordTime(startTime, "MessageHasBeenPosted", true) +} + +func (hooks *hooksTimerLayer) MessageHasBeenUpdated(c *Context, newPost, oldPost *model.Post) { + startTime := timePkg.Now() + hooks.hooksImpl.MessageHasBeenUpdated(c, newPost, oldPost) + hooks.recordTime(startTime, "MessageHasBeenUpdated", true) +} + +func (hooks *hooksTimerLayer) MessagesWillBeConsumed(posts []*model.Post) []*model.Post { + startTime := timePkg.Now() + 
_returnsA := hooks.hooksImpl.MessagesWillBeConsumed(posts) + hooks.recordTime(startTime, "MessagesWillBeConsumed", true) + return _returnsA +} + +func (hooks *hooksTimerLayer) MessageHasBeenDeleted(c *Context, post *model.Post) { + startTime := timePkg.Now() + hooks.hooksImpl.MessageHasBeenDeleted(c, post) + hooks.recordTime(startTime, "MessageHasBeenDeleted", true) +} + +func (hooks *hooksTimerLayer) ChannelHasBeenCreated(c *Context, channel *model.Channel) { + startTime := timePkg.Now() + hooks.hooksImpl.ChannelHasBeenCreated(c, channel) + hooks.recordTime(startTime, "ChannelHasBeenCreated", true) +} + +func (hooks *hooksTimerLayer) UserHasJoinedChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) { + startTime := timePkg.Now() + hooks.hooksImpl.UserHasJoinedChannel(c, channelMember, actor) + hooks.recordTime(startTime, "UserHasJoinedChannel", true) +} + +func (hooks *hooksTimerLayer) UserHasLeftChannel(c *Context, channelMember *model.ChannelMember, actor *model.User) { + startTime := timePkg.Now() + hooks.hooksImpl.UserHasLeftChannel(c, channelMember, actor) + hooks.recordTime(startTime, "UserHasLeftChannel", true) +} + +func (hooks *hooksTimerLayer) UserHasJoinedTeam(c *Context, teamMember *model.TeamMember, actor *model.User) { + startTime := timePkg.Now() + hooks.hooksImpl.UserHasJoinedTeam(c, teamMember, actor) + hooks.recordTime(startTime, "UserHasJoinedTeam", true) +} + +func (hooks *hooksTimerLayer) UserHasLeftTeam(c *Context, teamMember *model.TeamMember, actor *model.User) { + startTime := timePkg.Now() + hooks.hooksImpl.UserHasLeftTeam(c, teamMember, actor) + hooks.recordTime(startTime, "UserHasLeftTeam", true) +} + +func (hooks *hooksTimerLayer) FileWillBeUploaded(c *Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.FileWillBeUploaded(c, info, file, output) + hooks.recordTime(startTime, "FileWillBeUploaded", true) + 
return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) ReactionHasBeenAdded(c *Context, reaction *model.Reaction) { + startTime := timePkg.Now() + hooks.hooksImpl.ReactionHasBeenAdded(c, reaction) + hooks.recordTime(startTime, "ReactionHasBeenAdded", true) +} + +func (hooks *hooksTimerLayer) ReactionHasBeenRemoved(c *Context, reaction *model.Reaction) { + startTime := timePkg.Now() + hooks.hooksImpl.ReactionHasBeenRemoved(c, reaction) + hooks.recordTime(startTime, "ReactionHasBeenRemoved", true) +} + +func (hooks *hooksTimerLayer) OnPluginClusterEvent(c *Context, ev model.PluginClusterEvent) { + startTime := timePkg.Now() + hooks.hooksImpl.OnPluginClusterEvent(c, ev) + hooks.recordTime(startTime, "OnPluginClusterEvent", true) +} + +func (hooks *hooksTimerLayer) OnWebSocketConnect(webConnID, userID string) { + startTime := timePkg.Now() + hooks.hooksImpl.OnWebSocketConnect(webConnID, userID) + hooks.recordTime(startTime, "OnWebSocketConnect", true) +} + +func (hooks *hooksTimerLayer) OnWebSocketDisconnect(webConnID, userID string) { + startTime := timePkg.Now() + hooks.hooksImpl.OnWebSocketDisconnect(webConnID, userID) + hooks.recordTime(startTime, "OnWebSocketDisconnect", true) +} + +func (hooks *hooksTimerLayer) WebSocketMessageHasBeenPosted(webConnID, userID string, req *model.WebSocketRequest) { + startTime := timePkg.Now() + hooks.hooksImpl.WebSocketMessageHasBeenPosted(webConnID, userID, req) + hooks.recordTime(startTime, "WebSocketMessageHasBeenPosted", true) +} + +func (hooks *hooksTimerLayer) RunDataRetention(nowTime, batchSize int64) (int64, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.RunDataRetention(nowTime, batchSize) + hooks.recordTime(startTime, "RunDataRetention", _returnsB == nil) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) OnInstall(c *Context, event model.OnInstallEvent) error { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.OnInstall(c, event) + 
hooks.recordTime(startTime, "OnInstall", _returnsA == nil) + return _returnsA +} + +func (hooks *hooksTimerLayer) OnSendDailyTelemetry() { + startTime := timePkg.Now() + hooks.hooksImpl.OnSendDailyTelemetry() + hooks.recordTime(startTime, "OnSendDailyTelemetry", true) +} + +func (hooks *hooksTimerLayer) OnCloudLimitsUpdated(limits *model.ProductLimits) { + startTime := timePkg.Now() + hooks.hooksImpl.OnCloudLimitsUpdated(limits) + hooks.recordTime(startTime, "OnCloudLimitsUpdated", true) +} + +func (hooks *hooksTimerLayer) ConfigurationWillBeSaved(newCfg *model.Config) (*model.Config, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.ConfigurationWillBeSaved(newCfg) + hooks.recordTime(startTime, "ConfigurationWillBeSaved", _returnsB == nil) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) EmailNotificationWillBeSent(emailNotification *model.EmailNotification) (*model.EmailNotificationContent, string) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.EmailNotificationWillBeSent(emailNotification) + hooks.recordTime(startTime, "EmailNotificationWillBeSent", true) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) NotificationWillBePushed(pushNotification *model.PushNotification, userID string) (*model.PushNotification, string) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.NotificationWillBePushed(pushNotification, userID) + hooks.recordTime(startTime, "NotificationWillBePushed", true) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) UserHasBeenDeactivated(c *Context, user *model.User) { + startTime := timePkg.Now() + hooks.hooksImpl.UserHasBeenDeactivated(c, user) + hooks.recordTime(startTime, "UserHasBeenDeactivated", true) +} + +func (hooks *hooksTimerLayer) ServeMetrics(c *Context, w http.ResponseWriter, r *http.Request) { + startTime := timePkg.Now() + hooks.hooksImpl.ServeMetrics(c, w, r) + hooks.recordTime(startTime, "ServeMetrics", 
true) +} + +func (hooks *hooksTimerLayer) OnSharedChannelsSyncMsg(msg *model.SyncMsg, rc *model.RemoteCluster) (model.SyncResponse, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.OnSharedChannelsSyncMsg(msg, rc) + hooks.recordTime(startTime, "OnSharedChannelsSyncMsg", _returnsB == nil) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) OnSharedChannelsPing(rc *model.RemoteCluster) bool { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.OnSharedChannelsPing(rc) + hooks.recordTime(startTime, "OnSharedChannelsPing", true) + return _returnsA +} + +func (hooks *hooksTimerLayer) PreferencesHaveChanged(c *Context, preferences []model.Preference) { + startTime := timePkg.Now() + hooks.hooksImpl.PreferencesHaveChanged(c, preferences) + hooks.recordTime(startTime, "PreferencesHaveChanged", true) +} + +func (hooks *hooksTimerLayer) OnSharedChannelsAttachmentSyncMsg(fi *model.FileInfo, post *model.Post, rc *model.RemoteCluster) error { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.OnSharedChannelsAttachmentSyncMsg(fi, post, rc) + hooks.recordTime(startTime, "OnSharedChannelsAttachmentSyncMsg", _returnsA == nil) + return _returnsA +} + +func (hooks *hooksTimerLayer) OnSharedChannelsProfileImageSyncMsg(user *model.User, rc *model.RemoteCluster) error { + startTime := timePkg.Now() + _returnsA := hooks.hooksImpl.OnSharedChannelsProfileImageSyncMsg(user, rc) + hooks.recordTime(startTime, "OnSharedChannelsProfileImageSyncMsg", _returnsA == nil) + return _returnsA +} + +func (hooks *hooksTimerLayer) GenerateSupportData(c *Context) ([]*model.FileData, error) { + startTime := timePkg.Now() + _returnsA, _returnsB := hooks.hooksImpl.GenerateSupportData(c) + hooks.recordTime(startTime, "GenerateSupportData", _returnsB == nil) + return _returnsA, _returnsB +} + +func (hooks *hooksTimerLayer) OnSAMLLogin(c *Context, user *model.User, assertion *saml2.AssertionInfo) error { + startTime := timePkg.Now() + _returnsA := 
hooks.hooksImpl.OnSAMLLogin(c, user, assertion) + hooks.recordTime(startTime, "OnSAMLLogin", _returnsA == nil) + return _returnsA +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/http.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/http.go new file mode 100644 index 00000000..c1825665 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/http.go @@ -0,0 +1,119 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "bufio" + "errors" + "fmt" + "io" + "maps" + "net" + "net/http" + "net/rpc" + + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +type hijackedResponse struct { + conn net.Conn + bufrw *bufio.ReadWriter + readBuf []byte +} + +type httpResponseWriterRPCServer struct { + w http.ResponseWriter + log *mlog.Logger + hjr *hijackedResponse +} + +func (w *httpResponseWriterRPCServer) Header(args struct{}, reply *http.Header) error { + *reply = w.w.Header() + return nil +} + +func (w *httpResponseWriterRPCServer) Write(args []byte, reply *struct{}) error { + _, err := w.w.Write(args) + return err +} + +func (w *httpResponseWriterRPCServer) WriteHeader(args int, reply *struct{}) error { + // Check if args is a valid http status code. This prevents plugins from crashing the server with a panic. + // This is a copy of the checkWriteHeaderCode function in net/http/server.go in the go source. + if args < 100 || args > 999 { + w.log.Error(fmt.Sprintf("Plugin tried to write an invalid http status code: %v. 
Did not write the invalid header.", args)) + return errors.New("invalid http status code") + } + w.w.WriteHeader(args) + return nil +} + +func (w *httpResponseWriterRPCServer) SyncHeader(args http.Header, reply *struct{}) error { + dest := w.w.Header() + for k := range dest { + if _, ok := args[k]; !ok { + delete(dest, k) + } + } + maps.Copy(dest, args) + return nil +} + +func (w *httpResponseWriterRPCServer) Flush(args struct{}, reply *struct{}) error { + // Type assert to http.Flusher and flush if supported + if flusher, ok := w.w.(http.Flusher); ok { + flusher.Flush() + } + // If the underlying writer doesn't support Flusher, silently ignore + // This matches the HTTP spec's "best effort" semantics + return nil +} + +type httpResponseWriterRPCClient struct { + client *rpc.Client + header http.Header +} + +var _ http.ResponseWriter = (*httpResponseWriterRPCClient)(nil) +var _ http.Flusher = (*httpResponseWriterRPCClient)(nil) + +func (w *httpResponseWriterRPCClient) Header() http.Header { + if w.header == nil { + w.client.Call("Plugin.Header", struct{}{}, &w.header) + } + return w.header +} + +func (w *httpResponseWriterRPCClient) Write(b []byte) (int, error) { + if err := w.client.Call("Plugin.SyncHeader", w.header, nil); err != nil { + return 0, err + } + if err := w.client.Call("Plugin.Write", b, nil); err != nil { + return 0, err + } + return len(b), nil +} + +func (w *httpResponseWriterRPCClient) WriteHeader(statusCode int) { + if err := w.client.Call("Plugin.SyncHeader", w.header, nil); err != nil { + return + } + w.client.Call("Plugin.WriteHeader", statusCode, nil) +} + +// Flush implements http.Flusher interface +func (w *httpResponseWriterRPCClient) Flush() { + // Best-effort flush - ignore errors per HTTP spec + w.client.Call("Plugin.Flush", struct{}{}, nil) +} + +func (w *httpResponseWriterRPCClient) Close() error { + return w.client.Close() +} + +func connectHTTPResponseWriter(conn io.ReadWriteCloser) *httpResponseWriterRPCClient { + return 
&httpResponseWriterRPCClient{ + client: rpc.NewClient(conn), + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/io_rpc.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/io_rpc.go new file mode 100644 index 00000000..fad7373a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/io_rpc.go @@ -0,0 +1,46 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "bufio" + "encoding/binary" + "io" +) + +type remoteIOReader struct { + conn io.ReadWriteCloser +} + +func (r *remoteIOReader) Read(b []byte) (int, error) { + var buf [10]byte + n := binary.PutVarint(buf[:], int64(len(b))) + if _, err := r.conn.Write(buf[:n]); err != nil { + return 0, err + } + return r.conn.Read(b) +} + +func (r *remoteIOReader) Close() error { + return r.conn.Close() +} + +func connectIOReader(conn io.ReadWriteCloser) io.ReadCloser { + return &remoteIOReader{conn} +} + +func serveIOReader(r io.Reader, conn io.ReadWriteCloser) { + cr := bufio.NewReader(conn) + defer conn.Close() + buf := make([]byte, 32*1024) + for { + n, err := binary.ReadVarint(cr) + if err != nil { + break + } + if written, err := io.CopyBuffer(conn, io.LimitReader(r, n), buf); err != nil || written < n { + break + } + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/metrics.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/metrics.go new file mode 100644 index 00000000..b9c2f001 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/metrics.go @@ -0,0 +1,11 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package plugin + +type metricsInterface interface { + ObservePluginHookDuration(pluginID, hookName string, success bool, elapsed float64) + ObservePluginMultiHookIterationDuration(pluginID string, elapsed float64) + ObservePluginMultiHookDuration(elapsed float64) + ObservePluginAPIDuration(pluginID, apiName string, success bool, elapsed float64) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/api.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/api.go new file mode 100644 index 00000000..40c4dcce --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/api.go @@ -0,0 +1,6416 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. + +// Regenerate this file using `make plugin-mocks`. + +package plugintest + +import ( + io "io" + http "net/http" + + logr "github.com/mattermost/logr/v2" + + mock "github.com/stretchr/testify/mock" + + model "github.com/mattermost/mattermost/server/public/model" +) + +// API is an autogenerated mock type for the API type +type API struct { + mock.Mock +} + +// AddChannelMember provides a mock function with given fields: channelId, userID +func (_m *API) AddChannelMember(channelId string, userID string) (*model.ChannelMember, *model.AppError) { + ret := _m.Called(channelId, userID) + + if len(ret) == 0 { + panic("no return value specified for AddChannelMember") + } + + var r0 *model.ChannelMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.ChannelMember, *model.AppError)); ok { + return rf(channelId, userID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.ChannelMember); ok { + r0 = rf(channelId, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.ChannelMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(channelId, userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} 
+ +// AddReaction provides a mock function with given fields: reaction +func (_m *API) AddReaction(reaction *model.Reaction) (*model.Reaction, *model.AppError) { + ret := _m.Called(reaction) + + if len(ret) == 0 { + panic("no return value specified for AddReaction") + } + + var r0 *model.Reaction + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Reaction) (*model.Reaction, *model.AppError)); ok { + return rf(reaction) + } + if rf, ok := ret.Get(0).(func(*model.Reaction) *model.Reaction); ok { + r0 = rf(reaction) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Reaction) + } + } + + if rf, ok := ret.Get(1).(func(*model.Reaction) *model.AppError); ok { + r1 = rf(reaction) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// AddUserToChannel provides a mock function with given fields: channelId, userID, asUserId +func (_m *API) AddUserToChannel(channelId string, userID string, asUserId string) (*model.ChannelMember, *model.AppError) { + ret := _m.Called(channelId, userID, asUserId) + + if len(ret) == 0 { + panic("no return value specified for AddUserToChannel") + } + + var r0 *model.ChannelMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, string) (*model.ChannelMember, *model.AppError)); ok { + return rf(channelId, userID, asUserId) + } + if rf, ok := ret.Get(0).(func(string, string, string) *model.ChannelMember); ok { + r0 = rf(channelId, userID, asUserId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.ChannelMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string, string) *model.AppError); ok { + r1 = rf(channelId, userID, asUserId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CopyFileInfos provides a mock function with given fields: userID, fileIds +func (_m *API) CopyFileInfos(userID string, fileIds []string) ([]string, *model.AppError) { + ret := _m.Called(userID, 
fileIds) + + if len(ret) == 0 { + panic("no return value specified for CopyFileInfos") + } + + var r0 []string + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []string) ([]string, *model.AppError)); ok { + return rf(userID, fileIds) + } + if rf, ok := ret.Get(0).(func(string, []string) []string); ok { + r0 = rf(userID, fileIds) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(string, []string) *model.AppError); ok { + r1 = rf(userID, fileIds) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CountPropertyFields provides a mock function with given fields: groupID, includeDeleted +func (_m *API) CountPropertyFields(groupID string, includeDeleted bool) (int64, error) { + ret := _m.Called(groupID, includeDeleted) + + if len(ret) == 0 { + panic("no return value specified for CountPropertyFields") + } + + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(string, bool) (int64, error)); ok { + return rf(groupID, includeDeleted) + } + if rf, ok := ret.Get(0).(func(string, bool) int64); ok { + r0 = rf(groupID, includeDeleted) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(string, bool) error); ok { + r1 = rf(groupID, includeDeleted) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CountPropertyFieldsForTarget provides a mock function with given fields: groupID, targetType, targetID, includeDeleted +func (_m *API) CountPropertyFieldsForTarget(groupID string, targetType string, targetID string, includeDeleted bool) (int64, error) { + ret := _m.Called(groupID, targetType, targetID, includeDeleted) + + if len(ret) == 0 { + panic("no return value specified for CountPropertyFieldsForTarget") + } + + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(string, string, string, bool) (int64, error)); ok { + return rf(groupID, targetType, targetID, includeDeleted) + } + if rf, ok := 
ret.Get(0).(func(string, string, string, bool) int64); ok { + r0 = rf(groupID, targetType, targetID, includeDeleted) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(string, string, string, bool) error); ok { + r1 = rf(groupID, targetType, targetID, includeDeleted) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateBot provides a mock function with given fields: bot +func (_m *API) CreateBot(bot *model.Bot) (*model.Bot, *model.AppError) { + ret := _m.Called(bot) + + if len(ret) == 0 { + panic("no return value specified for CreateBot") + } + + var r0 *model.Bot + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Bot) (*model.Bot, *model.AppError)); ok { + return rf(bot) + } + if rf, ok := ret.Get(0).(func(*model.Bot) *model.Bot); ok { + r0 = rf(bot) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Bot) + } + } + + if rf, ok := ret.Get(1).(func(*model.Bot) *model.AppError); ok { + r1 = rf(bot) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateChannel provides a mock function with given fields: channel +func (_m *API) CreateChannel(channel *model.Channel) (*model.Channel, *model.AppError) { + ret := _m.Called(channel) + + if len(ret) == 0 { + panic("no return value specified for CreateChannel") + } + + var r0 *model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Channel) (*model.Channel, *model.AppError)); ok { + return rf(channel) + } + if rf, ok := ret.Get(0).(func(*model.Channel) *model.Channel); ok { + r0 = rf(channel) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(*model.Channel) *model.AppError); ok { + r1 = rf(channel) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateChannelSidebarCategory provides a mock function with given fields: userID, teamID, newCategory +func (_m *API) 
CreateChannelSidebarCategory(userID string, teamID string, newCategory *model.SidebarCategoryWithChannels) (*model.SidebarCategoryWithChannels, *model.AppError) { + ret := _m.Called(userID, teamID, newCategory) + + if len(ret) == 0 { + panic("no return value specified for CreateChannelSidebarCategory") + } + + var r0 *model.SidebarCategoryWithChannels + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, *model.SidebarCategoryWithChannels) (*model.SidebarCategoryWithChannels, *model.AppError)); ok { + return rf(userID, teamID, newCategory) + } + if rf, ok := ret.Get(0).(func(string, string, *model.SidebarCategoryWithChannels) *model.SidebarCategoryWithChannels); ok { + r0 = rf(userID, teamID, newCategory) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.SidebarCategoryWithChannels) + } + } + + if rf, ok := ret.Get(1).(func(string, string, *model.SidebarCategoryWithChannels) *model.AppError); ok { + r1 = rf(userID, teamID, newCategory) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateCommand provides a mock function with given fields: cmd +func (_m *API) CreateCommand(cmd *model.Command) (*model.Command, error) { + ret := _m.Called(cmd) + + if len(ret) == 0 { + panic("no return value specified for CreateCommand") + } + + var r0 *model.Command + var r1 error + if rf, ok := ret.Get(0).(func(*model.Command) (*model.Command, error)); ok { + return rf(cmd) + } + if rf, ok := ret.Get(0).(func(*model.Command) *model.Command); ok { + r0 = rf(cmd) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Command) + } + } + + if rf, ok := ret.Get(1).(func(*model.Command) error); ok { + r1 = rf(cmd) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateDefaultSyncableMemberships provides a mock function with given fields: params +func (_m *API) CreateDefaultSyncableMemberships(params model.CreateDefaultMembershipParams) *model.AppError { + ret := _m.Called(params) 
+ + if len(ret) == 0 { + panic("no return value specified for CreateDefaultSyncableMemberships") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(model.CreateDefaultMembershipParams) *model.AppError); ok { + r0 = rf(params) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// CreateGroup provides a mock function with given fields: group +func (_m *API) CreateGroup(group *model.Group) (*model.Group, *model.AppError) { + ret := _m.Called(group) + + if len(ret) == 0 { + panic("no return value specified for CreateGroup") + } + + var r0 *model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Group) (*model.Group, *model.AppError)); ok { + return rf(group) + } + if rf, ok := ret.Get(0).(func(*model.Group) *model.Group); ok { + r0 = rf(group) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(*model.Group) *model.AppError); ok { + r1 = rf(group) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateOAuthApp provides a mock function with given fields: app +func (_m *API) CreateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) { + ret := _m.Called(app) + + if len(ret) == 0 { + panic("no return value specified for CreateOAuthApp") + } + + var r0 *model.OAuthApp + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.OAuthApp) (*model.OAuthApp, *model.AppError)); ok { + return rf(app) + } + if rf, ok := ret.Get(0).(func(*model.OAuthApp) *model.OAuthApp); ok { + r0 = rf(app) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.OAuthApp) + } + } + + if rf, ok := ret.Get(1).(func(*model.OAuthApp) *model.AppError); ok { + r1 = rf(app) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreatePost provides a mock function with given fields: post +func (_m *API) CreatePost(post *model.Post) 
(*model.Post, *model.AppError) { + ret := _m.Called(post) + + if len(ret) == 0 { + panic("no return value specified for CreatePost") + } + + var r0 *model.Post + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Post) (*model.Post, *model.AppError)); ok { + return rf(post) + } + if rf, ok := ret.Get(0).(func(*model.Post) *model.Post); ok { + r0 = rf(post) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Post) + } + } + + if rf, ok := ret.Get(1).(func(*model.Post) *model.AppError); ok { + r1 = rf(post) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreatePropertyField provides a mock function with given fields: field +func (_m *API) CreatePropertyField(field *model.PropertyField) (*model.PropertyField, error) { + ret := _m.Called(field) + + if len(ret) == 0 { + panic("no return value specified for CreatePropertyField") + } + + var r0 *model.PropertyField + var r1 error + if rf, ok := ret.Get(0).(func(*model.PropertyField) (*model.PropertyField, error)); ok { + return rf(field) + } + if rf, ok := ret.Get(0).(func(*model.PropertyField) *model.PropertyField); ok { + r0 = rf(field) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyField) + } + } + + if rf, ok := ret.Get(1).(func(*model.PropertyField) error); ok { + r1 = rf(field) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreatePropertyValue provides a mock function with given fields: value +func (_m *API) CreatePropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) { + ret := _m.Called(value) + + if len(ret) == 0 { + panic("no return value specified for CreatePropertyValue") + } + + var r0 *model.PropertyValue + var r1 error + if rf, ok := ret.Get(0).(func(*model.PropertyValue) (*model.PropertyValue, error)); ok { + return rf(value) + } + if rf, ok := ret.Get(0).(func(*model.PropertyValue) *model.PropertyValue); ok { + r0 = rf(value) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func(*model.PropertyValue) error); ok { + r1 = rf(value) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateSession provides a mock function with given fields: session +func (_m *API) CreateSession(session *model.Session) (*model.Session, *model.AppError) { + ret := _m.Called(session) + + if len(ret) == 0 { + panic("no return value specified for CreateSession") + } + + var r0 *model.Session + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Session) (*model.Session, *model.AppError)); ok { + return rf(session) + } + if rf, ok := ret.Get(0).(func(*model.Session) *model.Session); ok { + r0 = rf(session) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Session) + } + } + + if rf, ok := ret.Get(1).(func(*model.Session) *model.AppError); ok { + r1 = rf(session) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateTeam provides a mock function with given fields: team +func (_m *API) CreateTeam(team *model.Team) (*model.Team, *model.AppError) { + ret := _m.Called(team) + + if len(ret) == 0 { + panic("no return value specified for CreateTeam") + } + + var r0 *model.Team + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Team) (*model.Team, *model.AppError)); ok { + return rf(team) + } + if rf, ok := ret.Get(0).(func(*model.Team) *model.Team); ok { + r0 = rf(team) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Team) + } + } + + if rf, ok := ret.Get(1).(func(*model.Team) *model.AppError); ok { + r1 = rf(team) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateTeamMember provides a mock function with given fields: teamID, userID +func (_m *API) CreateTeamMember(teamID string, userID string) (*model.TeamMember, *model.AppError) { + ret := _m.Called(teamID, userID) + + if len(ret) == 0 { + panic("no return value 
specified for CreateTeamMember") + } + + var r0 *model.TeamMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.TeamMember, *model.AppError)); ok { + return rf(teamID, userID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.TeamMember); ok { + r0 = rf(teamID, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.TeamMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(teamID, userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateTeamMembers provides a mock function with given fields: teamID, userIds, requestorId +func (_m *API) CreateTeamMembers(teamID string, userIds []string, requestorId string) ([]*model.TeamMember, *model.AppError) { + ret := _m.Called(teamID, userIds, requestorId) + + if len(ret) == 0 { + panic("no return value specified for CreateTeamMembers") + } + + var r0 []*model.TeamMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []string, string) ([]*model.TeamMember, *model.AppError)); ok { + return rf(teamID, userIds, requestorId) + } + if rf, ok := ret.Get(0).(func(string, []string, string) []*model.TeamMember); ok { + r0 = rf(teamID, userIds, requestorId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.TeamMember) + } + } + + if rf, ok := ret.Get(1).(func(string, []string, string) *model.AppError); ok { + r1 = rf(teamID, userIds, requestorId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateTeamMembersGracefully provides a mock function with given fields: teamID, userIds, requestorId +func (_m *API) CreateTeamMembersGracefully(teamID string, userIds []string, requestorId string) ([]*model.TeamMemberWithError, *model.AppError) { + ret := _m.Called(teamID, userIds, requestorId) + + if len(ret) == 0 { + panic("no return value specified for CreateTeamMembersGracefully") + } + + 
var r0 []*model.TeamMemberWithError + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []string, string) ([]*model.TeamMemberWithError, *model.AppError)); ok { + return rf(teamID, userIds, requestorId) + } + if rf, ok := ret.Get(0).(func(string, []string, string) []*model.TeamMemberWithError); ok { + r0 = rf(teamID, userIds, requestorId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.TeamMemberWithError) + } + } + + if rf, ok := ret.Get(1).(func(string, []string, string) *model.AppError); ok { + r1 = rf(teamID, userIds, requestorId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateUploadSession provides a mock function with given fields: us +func (_m *API) CreateUploadSession(us *model.UploadSession) (*model.UploadSession, error) { + ret := _m.Called(us) + + if len(ret) == 0 { + panic("no return value specified for CreateUploadSession") + } + + var r0 *model.UploadSession + var r1 error + if rf, ok := ret.Get(0).(func(*model.UploadSession) (*model.UploadSession, error)); ok { + return rf(us) + } + if rf, ok := ret.Get(0).(func(*model.UploadSession) *model.UploadSession); ok { + r0 = rf(us) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.UploadSession) + } + } + + if rf, ok := ret.Get(1).(func(*model.UploadSession) error); ok { + r1 = rf(us) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateUser provides a mock function with given fields: user +func (_m *API) CreateUser(user *model.User) (*model.User, *model.AppError) { + ret := _m.Called(user) + + if len(ret) == 0 { + panic("no return value specified for CreateUser") + } + + var r0 *model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.User) (*model.User, *model.AppError)); ok { + return rf(user) + } + if rf, ok := ret.Get(0).(func(*model.User) *model.User); ok { + r0 = rf(user) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.User) + } + } + + if rf, ok := 
ret.Get(1).(func(*model.User) *model.AppError); ok { + r1 = rf(user) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// CreateUserAccessToken provides a mock function with given fields: token +func (_m *API) CreateUserAccessToken(token *model.UserAccessToken) (*model.UserAccessToken, *model.AppError) { + ret := _m.Called(token) + + if len(ret) == 0 { + panic("no return value specified for CreateUserAccessToken") + } + + var r0 *model.UserAccessToken + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.UserAccessToken) (*model.UserAccessToken, *model.AppError)); ok { + return rf(token) + } + if rf, ok := ret.Get(0).(func(*model.UserAccessToken) *model.UserAccessToken); ok { + r0 = rf(token) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.UserAccessToken) + } + } + + if rf, ok := ret.Get(1).(func(*model.UserAccessToken) *model.AppError); ok { + r1 = rf(token) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// DeleteChannel provides a mock function with given fields: channelId +func (_m *API) DeleteChannel(channelId string) *model.AppError { + ret := _m.Called(channelId) + + if len(ret) == 0 { + panic("no return value specified for DeleteChannel") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(channelId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeleteChannelMember provides a mock function with given fields: channelId, userID +func (_m *API) DeleteChannelMember(channelId string, userID string) *model.AppError { + ret := _m.Called(channelId, userID) + + if len(ret) == 0 { + panic("no return value specified for DeleteChannelMember") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) *model.AppError); ok { + r0 = rf(channelId, userID) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeleteCommand provides a mock function with given fields: commandID +func (_m *API) DeleteCommand(commandID string) error { + ret := _m.Called(commandID) + + if len(ret) == 0 { + panic("no return value specified for DeleteCommand") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(commandID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteEphemeralPost provides a mock function with given fields: userID, postId +func (_m *API) DeleteEphemeralPost(userID string, postId string) { + _m.Called(userID, postId) +} + +// DeleteGroup provides a mock function with given fields: groupID +func (_m *API) DeleteGroup(groupID string) (*model.Group, *model.AppError) { + ret := _m.Called(groupID) + + if len(ret) == 0 { + panic("no return value specified for DeleteGroup") + } + + var r0 *model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Group, *model.AppError)); ok { + return rf(groupID) + } + if rf, ok := ret.Get(0).(func(string) *model.Group); ok { + r0 = rf(groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(groupID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// DeleteGroupConstrainedMemberships provides a mock function with no fields +func (_m *API) DeleteGroupConstrainedMemberships() *model.AppError { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteGroupConstrainedMemberships") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func() *model.AppError); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeleteGroupMember provides a mock function with given fields: groupID, userID +func (_m *API) DeleteGroupMember(groupID string, userID string) 
(*model.GroupMember, *model.AppError) { + ret := _m.Called(groupID, userID) + + if len(ret) == 0 { + panic("no return value specified for DeleteGroupMember") + } + + var r0 *model.GroupMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.GroupMember, *model.AppError)); ok { + return rf(groupID, userID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.GroupMember); ok { + r0 = rf(groupID, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.GroupMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(groupID, userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// DeleteGroupSyncable provides a mock function with given fields: groupID, syncableID, syncableType +func (_m *API) DeleteGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + ret := _m.Called(groupID, syncableID, syncableType) + + if len(ret) == 0 { + panic("no return value specified for DeleteGroupSyncable") + } + + var r0 *model.GroupSyncable + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, model.GroupSyncableType) (*model.GroupSyncable, *model.AppError)); ok { + return rf(groupID, syncableID, syncableType) + } + if rf, ok := ret.Get(0).(func(string, string, model.GroupSyncableType) *model.GroupSyncable); ok { + r0 = rf(groupID, syncableID, syncableType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.GroupSyncable) + } + } + + if rf, ok := ret.Get(1).(func(string, string, model.GroupSyncableType) *model.AppError); ok { + r1 = rf(groupID, syncableID, syncableType) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// DeleteOAuthApp provides a mock function with given fields: appID +func (_m *API) DeleteOAuthApp(appID string) *model.AppError { + ret := _m.Called(appID) + + if 
len(ret) == 0 { + panic("no return value specified for DeleteOAuthApp") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(appID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeletePost provides a mock function with given fields: postId +func (_m *API) DeletePost(postId string) *model.AppError { + ret := _m.Called(postId) + + if len(ret) == 0 { + panic("no return value specified for DeletePost") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(postId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeletePreferencesForUser provides a mock function with given fields: userID, preferences +func (_m *API) DeletePreferencesForUser(userID string, preferences []model.Preference) *model.AppError { + ret := _m.Called(userID, preferences) + + if len(ret) == 0 { + panic("no return value specified for DeletePreferencesForUser") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, []model.Preference) *model.AppError); ok { + r0 = rf(userID, preferences) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeletePropertyField provides a mock function with given fields: groupID, fieldID +func (_m *API) DeletePropertyField(groupID string, fieldID string) error { + ret := _m.Called(groupID, fieldID) + + if len(ret) == 0 { + panic("no return value specified for DeletePropertyField") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(groupID, fieldID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeletePropertyValue provides a mock function with given fields: groupID, valueID +func (_m *API) DeletePropertyValue(groupID string, valueID string) error { + ret := _m.Called(groupID, valueID) + + if len(ret) == 0 { + panic("no return value specified for 
DeletePropertyValue") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(groupID, valueID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeletePropertyValuesForField provides a mock function with given fields: groupID, fieldID +func (_m *API) DeletePropertyValuesForField(groupID string, fieldID string) error { + ret := _m.Called(groupID, fieldID) + + if len(ret) == 0 { + panic("no return value specified for DeletePropertyValuesForField") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(groupID, fieldID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeletePropertyValuesForTarget provides a mock function with given fields: groupID, targetType, targetID +func (_m *API) DeletePropertyValuesForTarget(groupID string, targetType string, targetID string) error { + ret := _m.Called(groupID, targetType, targetID) + + if len(ret) == 0 { + panic("no return value specified for DeletePropertyValuesForTarget") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string, string) error); ok { + r0 = rf(groupID, targetType, targetID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteTeam provides a mock function with given fields: teamID +func (_m *API) DeleteTeam(teamID string) *model.AppError { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for DeleteTeam") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeleteTeamMember provides a mock function with given fields: teamID, userID, requestorId +func (_m *API) DeleteTeamMember(teamID string, userID string, requestorId string) *model.AppError { + ret := _m.Called(teamID, userID, requestorId) + + if len(ret) == 0 { + panic("no return value specified for DeleteTeamMember") + } + + var r0 *model.AppError + if 
rf, ok := ret.Get(0).(func(string, string, string) *model.AppError); ok { + r0 = rf(teamID, userID, requestorId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DeleteUser provides a mock function with given fields: userID +func (_m *API) DeleteUser(userID string) *model.AppError { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for DeleteUser") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// DisablePlugin provides a mock function with given fields: id +func (_m *API) DisablePlugin(id string) *model.AppError { + ret := _m.Called(id) + + if len(ret) == 0 { + panic("no return value specified for DisablePlugin") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// EnablePlugin provides a mock function with given fields: id +func (_m *API) EnablePlugin(id string) *model.AppError { + ret := _m.Called(id) + + if len(ret) == 0 { + panic("no return value specified for EnablePlugin") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// EnsureBotUser provides a mock function with given fields: bot +func (_m *API) EnsureBotUser(bot *model.Bot) (string, error) { + ret := _m.Called(bot) + + if len(ret) == 0 { + panic("no return value specified for EnsureBotUser") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(*model.Bot) (string, error)); ok { + return rf(bot) + } + if rf, ok := ret.Get(0).(func(*model.Bot) string); ok { + r0 = rf(bot) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok 
:= ret.Get(1).(func(*model.Bot) error); ok { + r1 = rf(bot) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ExecuteSlashCommand provides a mock function with given fields: commandArgs +func (_m *API) ExecuteSlashCommand(commandArgs *model.CommandArgs) (*model.CommandResponse, error) { + ret := _m.Called(commandArgs) + + if len(ret) == 0 { + panic("no return value specified for ExecuteSlashCommand") + } + + var r0 *model.CommandResponse + var r1 error + if rf, ok := ret.Get(0).(func(*model.CommandArgs) (*model.CommandResponse, error)); ok { + return rf(commandArgs) + } + if rf, ok := ret.Get(0).(func(*model.CommandArgs) *model.CommandResponse); ok { + r0 = rf(commandArgs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.CommandResponse) + } + } + + if rf, ok := ret.Get(1).(func(*model.CommandArgs) error); ok { + r1 = rf(commandArgs) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ExtendSessionExpiry provides a mock function with given fields: sessionID, newExpiry +func (_m *API) ExtendSessionExpiry(sessionID string, newExpiry int64) *model.AppError { + ret := _m.Called(sessionID, newExpiry) + + if len(ret) == 0 { + panic("no return value specified for ExtendSessionExpiry") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, int64) *model.AppError); ok { + r0 = rf(sessionID, newExpiry) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// GetBot provides a mock function with given fields: botUserId, includeDeleted +func (_m *API) GetBot(botUserId string, includeDeleted bool) (*model.Bot, *model.AppError) { + ret := _m.Called(botUserId, includeDeleted) + + if len(ret) == 0 { + panic("no return value specified for GetBot") + } + + var r0 *model.Bot + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, bool) (*model.Bot, *model.AppError)); ok { + return rf(botUserId, includeDeleted) + } + if rf, ok := ret.Get(0).(func(string, bool) *model.Bot); ok { + 
r0 = rf(botUserId, includeDeleted) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Bot) + } + } + + if rf, ok := ret.Get(1).(func(string, bool) *model.AppError); ok { + r1 = rf(botUserId, includeDeleted) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetBots provides a mock function with given fields: options +func (_m *API) GetBots(options *model.BotGetOptions) ([]*model.Bot, *model.AppError) { + ret := _m.Called(options) + + if len(ret) == 0 { + panic("no return value specified for GetBots") + } + + var r0 []*model.Bot + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.BotGetOptions) ([]*model.Bot, *model.AppError)); ok { + return rf(options) + } + if rf, ok := ret.Get(0).(func(*model.BotGetOptions) []*model.Bot); ok { + r0 = rf(options) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Bot) + } + } + + if rf, ok := ret.Get(1).(func(*model.BotGetOptions) *model.AppError); ok { + r1 = rf(options) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetBundlePath provides a mock function with no fields +func (_m *API) GetBundlePath() (string, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetBundlePath") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func() (string, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetChannel provides a mock function with given fields: channelId +func (_m *API) GetChannel(channelId string) (*model.Channel, *model.AppError) { + ret := _m.Called(channelId) + + if len(ret) == 0 { + panic("no return value specified for GetChannel") + } + + var r0 *model.Channel + var r1 *model.AppError + if rf, ok := 
ret.Get(0).(func(string) (*model.Channel, *model.AppError)); ok { + return rf(channelId) + } + if rf, ok := ret.Get(0).(func(string) *model.Channel); ok { + r0 = rf(channelId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(channelId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelByName provides a mock function with given fields: teamID, name, includeDeleted +func (_m *API) GetChannelByName(teamID string, name string, includeDeleted bool) (*model.Channel, *model.AppError) { + ret := _m.Called(teamID, name, includeDeleted) + + if len(ret) == 0 { + panic("no return value specified for GetChannelByName") + } + + var r0 *model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, bool) (*model.Channel, *model.AppError)); ok { + return rf(teamID, name, includeDeleted) + } + if rf, ok := ret.Get(0).(func(string, string, bool) *model.Channel); ok { + r0 = rf(teamID, name, includeDeleted) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(string, string, bool) *model.AppError); ok { + r1 = rf(teamID, name, includeDeleted) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelByNameForTeamName provides a mock function with given fields: teamName, channelName, includeDeleted +func (_m *API) GetChannelByNameForTeamName(teamName string, channelName string, includeDeleted bool) (*model.Channel, *model.AppError) { + ret := _m.Called(teamName, channelName, includeDeleted) + + if len(ret) == 0 { + panic("no return value specified for GetChannelByNameForTeamName") + } + + var r0 *model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, bool) (*model.Channel, *model.AppError)); ok { + return rf(teamName, channelName, 
includeDeleted) + } + if rf, ok := ret.Get(0).(func(string, string, bool) *model.Channel); ok { + r0 = rf(teamName, channelName, includeDeleted) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(string, string, bool) *model.AppError); ok { + r1 = rf(teamName, channelName, includeDeleted) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelMember provides a mock function with given fields: channelId, userID +func (_m *API) GetChannelMember(channelId string, userID string) (*model.ChannelMember, *model.AppError) { + ret := _m.Called(channelId, userID) + + if len(ret) == 0 { + panic("no return value specified for GetChannelMember") + } + + var r0 *model.ChannelMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.ChannelMember, *model.AppError)); ok { + return rf(channelId, userID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.ChannelMember); ok { + r0 = rf(channelId, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.ChannelMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(channelId, userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelMembers provides a mock function with given fields: channelId, page, perPage +func (_m *API) GetChannelMembers(channelId string, page int, perPage int) (model.ChannelMembers, *model.AppError) { + ret := _m.Called(channelId, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetChannelMembers") + } + + var r0 model.ChannelMembers + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) (model.ChannelMembers, *model.AppError)); ok { + return rf(channelId, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) model.ChannelMembers); ok { + r0 = rf(channelId, page, perPage) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(model.ChannelMembers) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(channelId, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelMembersByIds provides a mock function with given fields: channelId, userIds +func (_m *API) GetChannelMembersByIds(channelId string, userIds []string) (model.ChannelMembers, *model.AppError) { + ret := _m.Called(channelId, userIds) + + if len(ret) == 0 { + panic("no return value specified for GetChannelMembersByIds") + } + + var r0 model.ChannelMembers + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []string) (model.ChannelMembers, *model.AppError)); ok { + return rf(channelId, userIds) + } + if rf, ok := ret.Get(0).(func(string, []string) model.ChannelMembers); ok { + r0 = rf(channelId, userIds) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(model.ChannelMembers) + } + } + + if rf, ok := ret.Get(1).(func(string, []string) *model.AppError); ok { + r1 = rf(channelId, userIds) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelMembersForUser provides a mock function with given fields: teamID, userID, page, perPage +func (_m *API) GetChannelMembersForUser(teamID string, userID string, page int, perPage int) ([]*model.ChannelMember, *model.AppError) { + ret := _m.Called(teamID, userID, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetChannelMembersForUser") + } + + var r0 []*model.ChannelMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, int, int) ([]*model.ChannelMember, *model.AppError)); ok { + return rf(teamID, userID, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, string, int, int) []*model.ChannelMember); ok { + r0 = rf(teamID, userID, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).([]*model.ChannelMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string, int, int) *model.AppError); ok { + r1 = rf(teamID, userID, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelSidebarCategories provides a mock function with given fields: userID, teamID +func (_m *API) GetChannelSidebarCategories(userID string, teamID string) (*model.OrderedSidebarCategories, *model.AppError) { + ret := _m.Called(userID, teamID) + + if len(ret) == 0 { + panic("no return value specified for GetChannelSidebarCategories") + } + + var r0 *model.OrderedSidebarCategories + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.OrderedSidebarCategories, *model.AppError)); ok { + return rf(userID, teamID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.OrderedSidebarCategories); ok { + r0 = rf(userID, teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.OrderedSidebarCategories) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(userID, teamID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelStats provides a mock function with given fields: channelId +func (_m *API) GetChannelStats(channelId string) (*model.ChannelStats, *model.AppError) { + ret := _m.Called(channelId) + + if len(ret) == 0 { + panic("no return value specified for GetChannelStats") + } + + var r0 *model.ChannelStats + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.ChannelStats, *model.AppError)); ok { + return rf(channelId) + } + if rf, ok := ret.Get(0).(func(string) *model.ChannelStats); ok { + r0 = rf(channelId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.ChannelStats) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(channelId) + } else { + if ret.Get(1) != nil { + r1 = 
ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetChannelsForTeamForUser provides a mock function with given fields: teamID, userID, includeDeleted +func (_m *API) GetChannelsForTeamForUser(teamID string, userID string, includeDeleted bool) ([]*model.Channel, *model.AppError) { + ret := _m.Called(teamID, userID, includeDeleted) + + if len(ret) == 0 { + panic("no return value specified for GetChannelsForTeamForUser") + } + + var r0 []*model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, bool) ([]*model.Channel, *model.AppError)); ok { + return rf(teamID, userID, includeDeleted) + } + if rf, ok := ret.Get(0).(func(string, string, bool) []*model.Channel); ok { + r0 = rf(teamID, userID, includeDeleted) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(string, string, bool) *model.AppError); ok { + r1 = rf(teamID, userID, includeDeleted) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetCloudLimits provides a mock function with no fields +func (_m *API) GetCloudLimits() (*model.ProductLimits, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetCloudLimits") + } + + var r0 *model.ProductLimits + var r1 error + if rf, ok := ret.Get(0).(func() (*model.ProductLimits, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() *model.ProductLimits); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.ProductLimits) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCommand provides a mock function with given fields: commandID +func (_m *API) GetCommand(commandID string) (*model.Command, error) { + ret := _m.Called(commandID) + + if len(ret) == 0 { + panic("no return value specified for GetCommand") + } + + var r0 *model.Command + var r1 error + 
if rf, ok := ret.Get(0).(func(string) (*model.Command, error)); ok { + return rf(commandID) + } + if rf, ok := ret.Get(0).(func(string) *model.Command); ok { + r0 = rf(commandID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Command) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(commandID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetConfig provides a mock function with no fields +func (_m *API) GetConfig() *model.Config { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetConfig") + } + + var r0 *model.Config + if rf, ok := ret.Get(0).(func() *model.Config); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Config) + } + } + + return r0 +} + +// GetDiagnosticId provides a mock function with no fields +func (_m *API) GetDiagnosticId() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetDiagnosticId") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// GetDirectChannel provides a mock function with given fields: userId1, userId2 +func (_m *API) GetDirectChannel(userId1 string, userId2 string) (*model.Channel, *model.AppError) { + ret := _m.Called(userId1, userId2) + + if len(ret) == 0 { + panic("no return value specified for GetDirectChannel") + } + + var r0 *model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.Channel, *model.AppError)); ok { + return rf(userId1, userId2) + } + if rf, ok := ret.Get(0).(func(string, string) *model.Channel); ok { + r0 = rf(userId1, userId2) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(userId1, userId2) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetEmoji 
provides a mock function with given fields: emojiId +func (_m *API) GetEmoji(emojiId string) (*model.Emoji, *model.AppError) { + ret := _m.Called(emojiId) + + if len(ret) == 0 { + panic("no return value specified for GetEmoji") + } + + var r0 *model.Emoji + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Emoji, *model.AppError)); ok { + return rf(emojiId) + } + if rf, ok := ret.Get(0).(func(string) *model.Emoji); ok { + r0 = rf(emojiId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Emoji) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(emojiId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetEmojiByName provides a mock function with given fields: name +func (_m *API) GetEmojiByName(name string) (*model.Emoji, *model.AppError) { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for GetEmojiByName") + } + + var r0 *model.Emoji + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Emoji, *model.AppError)); ok { + return rf(name) + } + if rf, ok := ret.Get(0).(func(string) *model.Emoji); ok { + r0 = rf(name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Emoji) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(name) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetEmojiImage provides a mock function with given fields: emojiId +func (_m *API) GetEmojiImage(emojiId string) ([]byte, string, *model.AppError) { + ret := _m.Called(emojiId) + + if len(ret) == 0 { + panic("no return value specified for GetEmojiImage") + } + + var r0 []byte + var r1 string + var r2 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]byte, string, *model.AppError)); ok { + return rf(emojiId) + } + if rf, ok := ret.Get(0).(func(string) []byte); ok { + r0 = rf(emojiId) + } else { + if ret.Get(0) != 
nil { + r0 = ret.Get(0).([]byte) + } + } + + if rf, ok := ret.Get(1).(func(string) string); ok { + r1 = rf(emojiId) + } else { + r1 = ret.Get(1).(string) + } + + if rf, ok := ret.Get(2).(func(string) *model.AppError); ok { + r2 = rf(emojiId) + } else { + if ret.Get(2) != nil { + r2 = ret.Get(2).(*model.AppError) + } + } + + return r0, r1, r2 +} + +// GetEmojiList provides a mock function with given fields: sortBy, page, perPage +func (_m *API) GetEmojiList(sortBy string, page int, perPage int) ([]*model.Emoji, *model.AppError) { + ret := _m.Called(sortBy, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetEmojiList") + } + + var r0 []*model.Emoji + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) ([]*model.Emoji, *model.AppError)); ok { + return rf(sortBy, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) []*model.Emoji); ok { + r0 = rf(sortBy, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Emoji) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(sortBy, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetFile provides a mock function with given fields: fileId +func (_m *API) GetFile(fileId string) ([]byte, *model.AppError) { + ret := _m.Called(fileId) + + if len(ret) == 0 { + panic("no return value specified for GetFile") + } + + var r0 []byte + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]byte, *model.AppError)); ok { + return rf(fileId) + } + if rf, ok := ret.Get(0).(func(string) []byte); ok { + r0 = rf(fileId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(fileId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetFileInfo provides a mock function with given fields: 
fileId +func (_m *API) GetFileInfo(fileId string) (*model.FileInfo, *model.AppError) { + ret := _m.Called(fileId) + + if len(ret) == 0 { + panic("no return value specified for GetFileInfo") + } + + var r0 *model.FileInfo + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.FileInfo, *model.AppError)); ok { + return rf(fileId) + } + if rf, ok := ret.Get(0).(func(string) *model.FileInfo); ok { + r0 = rf(fileId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.FileInfo) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(fileId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetFileInfos provides a mock function with given fields: page, perPage, opt +func (_m *API) GetFileInfos(page int, perPage int, opt *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError) { + ret := _m.Called(page, perPage, opt) + + if len(ret) == 0 { + panic("no return value specified for GetFileInfos") + } + + var r0 []*model.FileInfo + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(int, int, *model.GetFileInfosOptions) ([]*model.FileInfo, *model.AppError)); ok { + return rf(page, perPage, opt) + } + if rf, ok := ret.Get(0).(func(int, int, *model.GetFileInfosOptions) []*model.FileInfo); ok { + r0 = rf(page, perPage, opt) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.FileInfo) + } + } + + if rf, ok := ret.Get(1).(func(int, int, *model.GetFileInfosOptions) *model.AppError); ok { + r1 = rf(page, perPage, opt) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetFileLink provides a mock function with given fields: fileId +func (_m *API) GetFileLink(fileId string) (string, *model.AppError) { + ret := _m.Called(fileId) + + if len(ret) == 0 { + panic("no return value specified for GetFileLink") + } + + var r0 string + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (string, 
*model.AppError)); ok { + return rf(fileId) + } + if rf, ok := ret.Get(0).(func(string) string); ok { + r0 = rf(fileId) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(fileId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroup provides a mock function with given fields: groupId +func (_m *API) GetGroup(groupId string) (*model.Group, *model.AppError) { + ret := _m.Called(groupId) + + if len(ret) == 0 { + panic("no return value specified for GetGroup") + } + + var r0 *model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Group, *model.AppError)); ok { + return rf(groupId) + } + if rf, ok := ret.Get(0).(func(string) *model.Group); ok { + r0 = rf(groupId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(groupId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupByName provides a mock function with given fields: name +func (_m *API) GetGroupByName(name string) (*model.Group, *model.AppError) { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for GetGroupByName") + } + + var r0 *model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Group, *model.AppError)); ok { + return rf(name) + } + if rf, ok := ret.Get(0).(func(string) *model.Group); ok { + r0 = rf(name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(name) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupByRemoteID provides a mock function with given fields: remoteID, groupSource +func (_m *API) GetGroupByRemoteID(remoteID string, groupSource 
model.GroupSource) (*model.Group, *model.AppError) { + ret := _m.Called(remoteID, groupSource) + + if len(ret) == 0 { + panic("no return value specified for GetGroupByRemoteID") + } + + var r0 *model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, model.GroupSource) (*model.Group, *model.AppError)); ok { + return rf(remoteID, groupSource) + } + if rf, ok := ret.Get(0).(func(string, model.GroupSource) *model.Group); ok { + r0 = rf(remoteID, groupSource) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(string, model.GroupSource) *model.AppError); ok { + r1 = rf(remoteID, groupSource) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupChannel provides a mock function with given fields: userIds +func (_m *API) GetGroupChannel(userIds []string) (*model.Channel, *model.AppError) { + ret := _m.Called(userIds) + + if len(ret) == 0 { + panic("no return value specified for GetGroupChannel") + } + + var r0 *model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func([]string) (*model.Channel, *model.AppError)); ok { + return rf(userIds) + } + if rf, ok := ret.Get(0).(func([]string) *model.Channel); ok { + r0 = rf(userIds) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func([]string) *model.AppError); ok { + r1 = rf(userIds) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupMemberUsers provides a mock function with given fields: groupID, page, perPage +func (_m *API) GetGroupMemberUsers(groupID string, page int, perPage int) ([]*model.User, *model.AppError) { + ret := _m.Called(groupID, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetGroupMemberUsers") + } + + var r0 []*model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) 
([]*model.User, *model.AppError)); ok { + return rf(groupID, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) []*model.User); ok { + r0 = rf(groupID, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.User) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(groupID, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupSyncable provides a mock function with given fields: groupID, syncableID, syncableType +func (_m *API) GetGroupSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, *model.AppError) { + ret := _m.Called(groupID, syncableID, syncableType) + + if len(ret) == 0 { + panic("no return value specified for GetGroupSyncable") + } + + var r0 *model.GroupSyncable + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, model.GroupSyncableType) (*model.GroupSyncable, *model.AppError)); ok { + return rf(groupID, syncableID, syncableType) + } + if rf, ok := ret.Get(0).(func(string, string, model.GroupSyncableType) *model.GroupSyncable); ok { + r0 = rf(groupID, syncableID, syncableType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.GroupSyncable) + } + } + + if rf, ok := ret.Get(1).(func(string, string, model.GroupSyncableType) *model.AppError); ok { + r1 = rf(groupID, syncableID, syncableType) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupSyncables provides a mock function with given fields: groupID, syncableType +func (_m *API) GetGroupSyncables(groupID string, syncableType model.GroupSyncableType) ([]*model.GroupSyncable, *model.AppError) { + ret := _m.Called(groupID, syncableType) + + if len(ret) == 0 { + panic("no return value specified for GetGroupSyncables") + } + + var r0 []*model.GroupSyncable + var r1 *model.AppError + if rf, ok := 
ret.Get(0).(func(string, model.GroupSyncableType) ([]*model.GroupSyncable, *model.AppError)); ok { + return rf(groupID, syncableType) + } + if rf, ok := ret.Get(0).(func(string, model.GroupSyncableType) []*model.GroupSyncable); ok { + r0 = rf(groupID, syncableType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.GroupSyncable) + } + } + + if rf, ok := ret.Get(1).(func(string, model.GroupSyncableType) *model.AppError); ok { + r1 = rf(groupID, syncableType) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroups provides a mock function with given fields: page, perPage, opts, viewRestrictions +func (_m *API) GetGroups(page int, perPage int, opts model.GroupSearchOpts, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, *model.AppError) { + ret := _m.Called(page, perPage, opts, viewRestrictions) + + if len(ret) == 0 { + panic("no return value specified for GetGroups") + } + + var r0 []*model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(int, int, model.GroupSearchOpts, *model.ViewUsersRestrictions) ([]*model.Group, *model.AppError)); ok { + return rf(page, perPage, opts, viewRestrictions) + } + if rf, ok := ret.Get(0).(func(int, int, model.GroupSearchOpts, *model.ViewUsersRestrictions) []*model.Group); ok { + r0 = rf(page, perPage, opts, viewRestrictions) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(int, int, model.GroupSearchOpts, *model.ViewUsersRestrictions) *model.AppError); ok { + r1 = rf(page, perPage, opts, viewRestrictions) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupsBySource provides a mock function with given fields: groupSource +func (_m *API) GetGroupsBySource(groupSource model.GroupSource) ([]*model.Group, *model.AppError) { + ret := _m.Called(groupSource) + + if len(ret) == 0 { + panic("no return value specified 
for GetGroupsBySource") + } + + var r0 []*model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(model.GroupSource) ([]*model.Group, *model.AppError)); ok { + return rf(groupSource) + } + if rf, ok := ret.Get(0).(func(model.GroupSource) []*model.Group); ok { + r0 = rf(groupSource) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(model.GroupSource) *model.AppError); ok { + r1 = rf(groupSource) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetGroupsForUser provides a mock function with given fields: userID +func (_m *API) GetGroupsForUser(userID string) ([]*model.Group, *model.AppError) { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for GetGroupsForUser") + } + + var r0 []*model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]*model.Group, *model.AppError)); ok { + return rf(userID) + } + if rf, ok := ret.Get(0).(func(string) []*model.Group); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetLDAPUserAttributes provides a mock function with given fields: userID, attributes +func (_m *API) GetLDAPUserAttributes(userID string, attributes []string) (map[string]string, *model.AppError) { + ret := _m.Called(userID, attributes) + + if len(ret) == 0 { + panic("no return value specified for GetLDAPUserAttributes") + } + + var r0 map[string]string + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []string) (map[string]string, *model.AppError)); ok { + return rf(userID, attributes) + } + if rf, ok := ret.Get(0).(func(string, []string) map[string]string); ok { + r0 = rf(userID, attributes) + } else { + if ret.Get(0) != nil 
{ + r0 = ret.Get(0).(map[string]string) + } + } + + if rf, ok := ret.Get(1).(func(string, []string) *model.AppError); ok { + r1 = rf(userID, attributes) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetLicense provides a mock function with no fields +func (_m *API) GetLicense() *model.License { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetLicense") + } + + var r0 *model.License + if rf, ok := ret.Get(0).(func() *model.License); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.License) + } + } + + return r0 +} + +// GetOAuthApp provides a mock function with given fields: appID +func (_m *API) GetOAuthApp(appID string) (*model.OAuthApp, *model.AppError) { + ret := _m.Called(appID) + + if len(ret) == 0 { + panic("no return value specified for GetOAuthApp") + } + + var r0 *model.OAuthApp + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.OAuthApp, *model.AppError)); ok { + return rf(appID) + } + if rf, ok := ret.Get(0).(func(string) *model.OAuthApp); ok { + r0 = rf(appID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.OAuthApp) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(appID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPluginConfig provides a mock function with no fields +func (_m *API) GetPluginConfig() map[string]interface{} { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetPluginConfig") + } + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func() map[string]interface{}); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + return r0 +} + +// GetPluginID provides a mock function with no fields +func (_m *API) GetPluginID() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return 
value specified for GetPluginID") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// GetPluginStatus provides a mock function with given fields: id +func (_m *API) GetPluginStatus(id string) (*model.PluginStatus, *model.AppError) { + ret := _m.Called(id) + + if len(ret) == 0 { + panic("no return value specified for GetPluginStatus") + } + + var r0 *model.PluginStatus + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.PluginStatus, *model.AppError)); ok { + return rf(id) + } + if rf, ok := ret.Get(0).(func(string) *model.PluginStatus); ok { + r0 = rf(id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PluginStatus) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(id) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPlugins provides a mock function with no fields +func (_m *API) GetPlugins() ([]*model.Manifest, *model.AppError) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetPlugins") + } + + var r0 []*model.Manifest + var r1 *model.AppError + if rf, ok := ret.Get(0).(func() ([]*model.Manifest, *model.AppError)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*model.Manifest); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Manifest) + } + } + + if rf, ok := ret.Get(1).(func() *model.AppError); ok { + r1 = rf() + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPost provides a mock function with given fields: postId +func (_m *API) GetPost(postId string) (*model.Post, *model.AppError) { + ret := _m.Called(postId) + + if len(ret) == 0 { + panic("no return value specified for GetPost") + } + + var r0 *model.Post + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Post, 
*model.AppError)); ok { + return rf(postId) + } + if rf, ok := ret.Get(0).(func(string) *model.Post); ok { + r0 = rf(postId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Post) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(postId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPostThread provides a mock function with given fields: postId +func (_m *API) GetPostThread(postId string) (*model.PostList, *model.AppError) { + ret := _m.Called(postId) + + if len(ret) == 0 { + panic("no return value specified for GetPostThread") + } + + var r0 *model.PostList + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.PostList, *model.AppError)); ok { + return rf(postId) + } + if rf, ok := ret.Get(0).(func(string) *model.PostList); ok { + r0 = rf(postId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PostList) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(postId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPostsAfter provides a mock function with given fields: channelId, postId, page, perPage +func (_m *API) GetPostsAfter(channelId string, postId string, page int, perPage int) (*model.PostList, *model.AppError) { + ret := _m.Called(channelId, postId, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetPostsAfter") + } + + var r0 *model.PostList + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, int, int) (*model.PostList, *model.AppError)); ok { + return rf(channelId, postId, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, string, int, int) *model.PostList); ok { + r0 = rf(channelId, postId, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PostList) + } + } + + if rf, ok := ret.Get(1).(func(string, string, int, int) *model.AppError); ok { + r1 
= rf(channelId, postId, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPostsBefore provides a mock function with given fields: channelId, postId, page, perPage +func (_m *API) GetPostsBefore(channelId string, postId string, page int, perPage int) (*model.PostList, *model.AppError) { + ret := _m.Called(channelId, postId, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetPostsBefore") + } + + var r0 *model.PostList + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, int, int) (*model.PostList, *model.AppError)); ok { + return rf(channelId, postId, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, string, int, int) *model.PostList); ok { + r0 = rf(channelId, postId, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PostList) + } + } + + if rf, ok := ret.Get(1).(func(string, string, int, int) *model.AppError); ok { + r1 = rf(channelId, postId, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPostsForChannel provides a mock function with given fields: channelId, page, perPage +func (_m *API) GetPostsForChannel(channelId string, page int, perPage int) (*model.PostList, *model.AppError) { + ret := _m.Called(channelId, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetPostsForChannel") + } + + var r0 *model.PostList + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) (*model.PostList, *model.AppError)); ok { + return rf(channelId, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) *model.PostList); ok { + r0 = rf(channelId, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PostList) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(channelId, page, perPage) + } else { + if ret.Get(1) != nil { + r1 
= ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPostsSince provides a mock function with given fields: channelId, time +func (_m *API) GetPostsSince(channelId string, time int64) (*model.PostList, *model.AppError) { + ret := _m.Called(channelId, time) + + if len(ret) == 0 { + panic("no return value specified for GetPostsSince") + } + + var r0 *model.PostList + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int64) (*model.PostList, *model.AppError)); ok { + return rf(channelId, time) + } + if rf, ok := ret.Get(0).(func(string, int64) *model.PostList); ok { + r0 = rf(channelId, time) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PostList) + } + } + + if rf, ok := ret.Get(1).(func(string, int64) *model.AppError); ok { + r1 = rf(channelId, time) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPreferenceForUser provides a mock function with given fields: userID, category, name +func (_m *API) GetPreferenceForUser(userID string, category string, name string) (model.Preference, *model.AppError) { + ret := _m.Called(userID, category, name) + + if len(ret) == 0 { + panic("no return value specified for GetPreferenceForUser") + } + + var r0 model.Preference + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, string) (model.Preference, *model.AppError)); ok { + return rf(userID, category, name) + } + if rf, ok := ret.Get(0).(func(string, string, string) model.Preference); ok { + r0 = rf(userID, category, name) + } else { + r0 = ret.Get(0).(model.Preference) + } + + if rf, ok := ret.Get(1).(func(string, string, string) *model.AppError); ok { + r1 = rf(userID, category, name) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPreferencesForUser provides a mock function with given fields: userID +func (_m *API) GetPreferencesForUser(userID string) ([]model.Preference, *model.AppError) { + 
ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for GetPreferencesForUser") + } + + var r0 []model.Preference + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]model.Preference, *model.AppError)); ok { + return rf(userID) + } + if rf, ok := ret.Get(0).(func(string) []model.Preference); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]model.Preference) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetProfileImage provides a mock function with given fields: userID +func (_m *API) GetProfileImage(userID string) ([]byte, *model.AppError) { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for GetProfileImage") + } + + var r0 []byte + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]byte, *model.AppError)); ok { + return rf(userID) + } + if rf, ok := ret.Get(0).(func(string) []byte); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetPropertyField provides a mock function with given fields: groupID, fieldID +func (_m *API) GetPropertyField(groupID string, fieldID string) (*model.PropertyField, error) { + ret := _m.Called(groupID, fieldID) + + if len(ret) == 0 { + panic("no return value specified for GetPropertyField") + } + + var r0 *model.PropertyField + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (*model.PropertyField, error)); ok { + return rf(groupID, fieldID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.PropertyField); ok { + r0 = rf(groupID, fieldID) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(*model.PropertyField) + } + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(groupID, fieldID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPropertyFieldByName provides a mock function with given fields: groupID, targetID, name +func (_m *API) GetPropertyFieldByName(groupID string, targetID string, name string) (*model.PropertyField, error) { + ret := _m.Called(groupID, targetID, name) + + if len(ret) == 0 { + panic("no return value specified for GetPropertyFieldByName") + } + + var r0 *model.PropertyField + var r1 error + if rf, ok := ret.Get(0).(func(string, string, string) (*model.PropertyField, error)); ok { + return rf(groupID, targetID, name) + } + if rf, ok := ret.Get(0).(func(string, string, string) *model.PropertyField); ok { + r0 = rf(groupID, targetID, name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyField) + } + } + + if rf, ok := ret.Get(1).(func(string, string, string) error); ok { + r1 = rf(groupID, targetID, name) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPropertyFields provides a mock function with given fields: groupID, ids +func (_m *API) GetPropertyFields(groupID string, ids []string) ([]*model.PropertyField, error) { + ret := _m.Called(groupID, ids) + + if len(ret) == 0 { + panic("no return value specified for GetPropertyFields") + } + + var r0 []*model.PropertyField + var r1 error + if rf, ok := ret.Get(0).(func(string, []string) ([]*model.PropertyField, error)); ok { + return rf(groupID, ids) + } + if rf, ok := ret.Get(0).(func(string, []string) []*model.PropertyField); ok { + r0 = rf(groupID, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.PropertyField) + } + } + + if rf, ok := ret.Get(1).(func(string, []string) error); ok { + r1 = rf(groupID, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPropertyGroup provides a mock function with given fields: name +func (_m *API) GetPropertyGroup(name 
string) (*model.PropertyGroup, error) { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for GetPropertyGroup") + } + + var r0 *model.PropertyGroup + var r1 error + if rf, ok := ret.Get(0).(func(string) (*model.PropertyGroup, error)); ok { + return rf(name) + } + if rf, ok := ret.Get(0).(func(string) *model.PropertyGroup); ok { + r0 = rf(name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyGroup) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(name) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPropertyValue provides a mock function with given fields: groupID, valueID +func (_m *API) GetPropertyValue(groupID string, valueID string) (*model.PropertyValue, error) { + ret := _m.Called(groupID, valueID) + + if len(ret) == 0 { + panic("no return value specified for GetPropertyValue") + } + + var r0 *model.PropertyValue + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (*model.PropertyValue, error)); ok { + return rf(groupID, valueID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.PropertyValue); ok { + r0 = rf(groupID, valueID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(groupID, valueID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPropertyValues provides a mock function with given fields: groupID, ids +func (_m *API) GetPropertyValues(groupID string, ids []string) ([]*model.PropertyValue, error) { + ret := _m.Called(groupID, ids) + + if len(ret) == 0 { + panic("no return value specified for GetPropertyValues") + } + + var r0 []*model.PropertyValue + var r1 error + if rf, ok := ret.Get(0).(func(string, []string) ([]*model.PropertyValue, error)); ok { + return rf(groupID, ids) + } + if rf, ok := ret.Get(0).(func(string, []string) []*model.PropertyValue); ok { + r0 = rf(groupID, ids) + } else { + if ret.Get(0) != 
nil { + r0 = ret.Get(0).([]*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func(string, []string) error); ok { + r1 = rf(groupID, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetPublicChannelsForTeam provides a mock function with given fields: teamID, page, perPage +func (_m *API) GetPublicChannelsForTeam(teamID string, page int, perPage int) ([]*model.Channel, *model.AppError) { + ret := _m.Called(teamID, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetPublicChannelsForTeam") + } + + var r0 []*model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) ([]*model.Channel, *model.AppError)); ok { + return rf(teamID, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) []*model.Channel); ok { + r0 = rf(teamID, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(teamID, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetReactions provides a mock function with given fields: postId +func (_m *API) GetReactions(postId string) ([]*model.Reaction, *model.AppError) { + ret := _m.Called(postId) + + if len(ret) == 0 { + panic("no return value specified for GetReactions") + } + + var r0 []*model.Reaction + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]*model.Reaction, *model.AppError)); ok { + return rf(postId) + } + if rf, ok := ret.Get(0).(func(string) []*model.Reaction); ok { + r0 = rf(postId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Reaction) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(postId) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetServerVersion provides a mock function with no fields +func (_m *API) 
GetServerVersion() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetServerVersion") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// GetSession provides a mock function with given fields: sessionID +func (_m *API) GetSession(sessionID string) (*model.Session, *model.AppError) { + ret := _m.Called(sessionID) + + if len(ret) == 0 { + panic("no return value specified for GetSession") + } + + var r0 *model.Session + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Session, *model.AppError)); ok { + return rf(sessionID) + } + if rf, ok := ret.Get(0).(func(string) *model.Session); ok { + r0 = rf(sessionID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Session) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(sessionID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetSystemInstallDate provides a mock function with no fields +func (_m *API) GetSystemInstallDate() (int64, *model.AppError) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetSystemInstallDate") + } + + var r0 int64 + var r1 *model.AppError + if rf, ok := ret.Get(0).(func() (int64, *model.AppError)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() int64); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func() *model.AppError); ok { + r1 = rf() + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeam provides a mock function with given fields: teamID +func (_m *API) GetTeam(teamID string) (*model.Team, *model.AppError) { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for GetTeam") + } + + var r0 *model.Team + var r1 *model.AppError + if rf, ok := 
ret.Get(0).(func(string) (*model.Team, *model.AppError)); ok { + return rf(teamID) + } + if rf, ok := ret.Get(0).(func(string) *model.Team); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Team) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(teamID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeamByName provides a mock function with given fields: name +func (_m *API) GetTeamByName(name string) (*model.Team, *model.AppError) { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for GetTeamByName") + } + + var r0 *model.Team + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Team, *model.AppError)); ok { + return rf(name) + } + if rf, ok := ret.Get(0).(func(string) *model.Team); ok { + r0 = rf(name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Team) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(name) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeamIcon provides a mock function with given fields: teamID +func (_m *API) GetTeamIcon(teamID string) ([]byte, *model.AppError) { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for GetTeamIcon") + } + + var r0 []byte + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]byte, *model.AppError)); ok { + return rf(teamID) + } + if rf, ok := ret.Get(0).(func(string) []byte); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(teamID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeamMember provides a mock function with given fields: teamID, userID +func (_m *API) GetTeamMember(teamID string, 
userID string) (*model.TeamMember, *model.AppError) { + ret := _m.Called(teamID, userID) + + if len(ret) == 0 { + panic("no return value specified for GetTeamMember") + } + + var r0 *model.TeamMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.TeamMember, *model.AppError)); ok { + return rf(teamID, userID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.TeamMember); ok { + r0 = rf(teamID, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.TeamMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(teamID, userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeamMembers provides a mock function with given fields: teamID, page, perPage +func (_m *API) GetTeamMembers(teamID string, page int, perPage int) ([]*model.TeamMember, *model.AppError) { + ret := _m.Called(teamID, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetTeamMembers") + } + + var r0 []*model.TeamMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) ([]*model.TeamMember, *model.AppError)); ok { + return rf(teamID, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) []*model.TeamMember); ok { + r0 = rf(teamID, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.TeamMember) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(teamID, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeamMembersForUser provides a mock function with given fields: userID, page, perPage +func (_m *API) GetTeamMembersForUser(userID string, page int, perPage int) ([]*model.TeamMember, *model.AppError) { + ret := _m.Called(userID, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetTeamMembersForUser") + } + + var r0 
[]*model.TeamMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) ([]*model.TeamMember, *model.AppError)); ok { + return rf(userID, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) []*model.TeamMember); ok { + r0 = rf(userID, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.TeamMember) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(userID, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeamStats provides a mock function with given fields: teamID +func (_m *API) GetTeamStats(teamID string) (*model.TeamStats, *model.AppError) { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for GetTeamStats") + } + + var r0 *model.TeamStats + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.TeamStats, *model.AppError)); ok { + return rf(teamID) + } + if rf, ok := ret.Get(0).(func(string) *model.TeamStats); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.TeamStats) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(teamID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeams provides a mock function with no fields +func (_m *API) GetTeams() ([]*model.Team, *model.AppError) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetTeams") + } + + var r0 []*model.Team + var r1 *model.AppError + if rf, ok := ret.Get(0).(func() ([]*model.Team, *model.AppError)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*model.Team); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Team) + } + } + + if rf, ok := ret.Get(1).(func() *model.AppError); ok { + r1 = rf() + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } 
+ } + + return r0, r1 +} + +// GetTeamsForUser provides a mock function with given fields: userID +func (_m *API) GetTeamsForUser(userID string) ([]*model.Team, *model.AppError) { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for GetTeamsForUser") + } + + var r0 []*model.Team + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]*model.Team, *model.AppError)); ok { + return rf(userID) + } + if rf, ok := ret.Get(0).(func(string) []*model.Team); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Team) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTeamsUnreadForUser provides a mock function with given fields: userID +func (_m *API) GetTeamsUnreadForUser(userID string) ([]*model.TeamUnread, *model.AppError) { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for GetTeamsUnreadForUser") + } + + var r0 []*model.TeamUnread + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]*model.TeamUnread, *model.AppError)); ok { + return rf(userID) + } + if rf, ok := ret.Get(0).(func(string) []*model.TeamUnread); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.TeamUnread) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetTelemetryId provides a mock function with no fields +func (_m *API) GetTelemetryId() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetTelemetryId") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// GetUnsanitizedConfig provides a mock function with no 
fields +func (_m *API) GetUnsanitizedConfig() *model.Config { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetUnsanitizedConfig") + } + + var r0 *model.Config + if rf, ok := ret.Get(0).(func() *model.Config); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Config) + } + } + + return r0 +} + +// GetUploadSession provides a mock function with given fields: uploadID +func (_m *API) GetUploadSession(uploadID string) (*model.UploadSession, error) { + ret := _m.Called(uploadID) + + if len(ret) == 0 { + panic("no return value specified for GetUploadSession") + } + + var r0 *model.UploadSession + var r1 error + if rf, ok := ret.Get(0).(func(string) (*model.UploadSession, error)); ok { + return rf(uploadID) + } + if rf, ok := ret.Get(0).(func(string) *model.UploadSession); ok { + r0 = rf(uploadID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.UploadSession) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(uploadID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetUser provides a mock function with given fields: userID +func (_m *API) GetUser(userID string) (*model.User, *model.AppError) { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for GetUser") + } + + var r0 *model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.User, *model.AppError)); ok { + return rf(userID) + } + if rf, ok := ret.Get(0).(func(string) *model.User); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.User) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUserByEmail provides a mock function with given fields: email +func (_m *API) GetUserByEmail(email string) (*model.User, *model.AppError) { + ret := _m.Called(email) + + if 
len(ret) == 0 { + panic("no return value specified for GetUserByEmail") + } + + var r0 *model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.User, *model.AppError)); ok { + return rf(email) + } + if rf, ok := ret.Get(0).(func(string) *model.User); ok { + r0 = rf(email) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.User) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(email) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUserByUsername provides a mock function with given fields: name +func (_m *API) GetUserByUsername(name string) (*model.User, *model.AppError) { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for GetUserByUsername") + } + + var r0 *model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.User, *model.AppError)); ok { + return rf(name) + } + if rf, ok := ret.Get(0).(func(string) *model.User); ok { + r0 = rf(name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.User) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(name) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUserStatus provides a mock function with given fields: userID +func (_m *API) GetUserStatus(userID string) (*model.Status, *model.AppError) { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for GetUserStatus") + } + + var r0 *model.Status + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Status, *model.AppError)); ok { + return rf(userID) + } + if rf, ok := ret.Get(0).(func(string) *model.Status); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Status) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(userID) + } else { + if ret.Get(1) != nil { + r1 
= ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUserStatusesByIds provides a mock function with given fields: userIds +func (_m *API) GetUserStatusesByIds(userIds []string) ([]*model.Status, *model.AppError) { + ret := _m.Called(userIds) + + if len(ret) == 0 { + panic("no return value specified for GetUserStatusesByIds") + } + + var r0 []*model.Status + var r1 *model.AppError + if rf, ok := ret.Get(0).(func([]string) ([]*model.Status, *model.AppError)); ok { + return rf(userIds) + } + if rf, ok := ret.Get(0).(func([]string) []*model.Status); ok { + r0 = rf(userIds) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Status) + } + } + + if rf, ok := ret.Get(1).(func([]string) *model.AppError); ok { + r1 = rf(userIds) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUsers provides a mock function with given fields: options +func (_m *API) GetUsers(options *model.UserGetOptions) ([]*model.User, *model.AppError) { + ret := _m.Called(options) + + if len(ret) == 0 { + panic("no return value specified for GetUsers") + } + + var r0 []*model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.UserGetOptions) ([]*model.User, *model.AppError)); ok { + return rf(options) + } + if rf, ok := ret.Get(0).(func(*model.UserGetOptions) []*model.User); ok { + r0 = rf(options) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.User) + } + } + + if rf, ok := ret.Get(1).(func(*model.UserGetOptions) *model.AppError); ok { + r1 = rf(options) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUsersByIds provides a mock function with given fields: userIDs +func (_m *API) GetUsersByIds(userIDs []string) ([]*model.User, *model.AppError) { + ret := _m.Called(userIDs) + + if len(ret) == 0 { + panic("no return value specified for GetUsersByIds") + } + + var r0 []*model.User + var r1 *model.AppError + if rf, ok := 
ret.Get(0).(func([]string) ([]*model.User, *model.AppError)); ok { + return rf(userIDs) + } + if rf, ok := ret.Get(0).(func([]string) []*model.User); ok { + r0 = rf(userIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.User) + } + } + + if rf, ok := ret.Get(1).(func([]string) *model.AppError); ok { + r1 = rf(userIDs) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUsersByUsernames provides a mock function with given fields: usernames +func (_m *API) GetUsersByUsernames(usernames []string) ([]*model.User, *model.AppError) { + ret := _m.Called(usernames) + + if len(ret) == 0 { + panic("no return value specified for GetUsersByUsernames") + } + + var r0 []*model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func([]string) ([]*model.User, *model.AppError)); ok { + return rf(usernames) + } + if rf, ok := ret.Get(0).(func([]string) []*model.User); ok { + r0 = rf(usernames) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.User) + } + } + + if rf, ok := ret.Get(1).(func([]string) *model.AppError); ok { + r1 = rf(usernames) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUsersInChannel provides a mock function with given fields: channelID, sortBy, page, perPage +func (_m *API) GetUsersInChannel(channelID string, sortBy string, page int, perPage int) ([]*model.User, *model.AppError) { + ret := _m.Called(channelID, sortBy, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetUsersInChannel") + } + + var r0 []*model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, int, int) ([]*model.User, *model.AppError)); ok { + return rf(channelID, sortBy, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, string, int, int) []*model.User); ok { + r0 = rf(channelID, sortBy, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.User) + } + 
} + + if rf, ok := ret.Get(1).(func(string, string, int, int) *model.AppError); ok { + r1 = rf(channelID, sortBy, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// GetUsersInTeam provides a mock function with given fields: teamID, page, perPage +func (_m *API) GetUsersInTeam(teamID string, page int, perPage int) ([]*model.User, *model.AppError) { + ret := _m.Called(teamID, page, perPage) + + if len(ret) == 0 { + panic("no return value specified for GetUsersInTeam") + } + + var r0 []*model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, int) ([]*model.User, *model.AppError)); ok { + return rf(teamID, page, perPage) + } + if rf, ok := ret.Get(0).(func(string, int, int) []*model.User); ok { + r0 = rf(teamID, page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.User) + } + } + + if rf, ok := ret.Get(1).(func(string, int, int) *model.AppError); ok { + r1 = rf(teamID, page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// HasPermissionTo provides a mock function with given fields: userID, permission +func (_m *API) HasPermissionTo(userID string, permission *model.Permission) bool { + ret := _m.Called(userID, permission) + + if len(ret) == 0 { + panic("no return value specified for HasPermissionTo") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(string, *model.Permission) bool); ok { + r0 = rf(userID, permission) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// HasPermissionToChannel provides a mock function with given fields: userID, channelId, permission +func (_m *API) HasPermissionToChannel(userID string, channelId string, permission *model.Permission) bool { + ret := _m.Called(userID, channelId, permission) + + if len(ret) == 0 { + panic("no return value specified for HasPermissionToChannel") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(string, string, 
*model.Permission) bool); ok { + r0 = rf(userID, channelId, permission) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// HasPermissionToTeam provides a mock function with given fields: userID, teamID, permission +func (_m *API) HasPermissionToTeam(userID string, teamID string, permission *model.Permission) bool { + ret := _m.Called(userID, teamID, permission) + + if len(ret) == 0 { + panic("no return value specified for HasPermissionToTeam") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(string, string, *model.Permission) bool); ok { + r0 = rf(userID, teamID, permission) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// InstallPlugin provides a mock function with given fields: file, replace +func (_m *API) InstallPlugin(file io.Reader, replace bool) (*model.Manifest, *model.AppError) { + ret := _m.Called(file, replace) + + if len(ret) == 0 { + panic("no return value specified for InstallPlugin") + } + + var r0 *model.Manifest + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(io.Reader, bool) (*model.Manifest, *model.AppError)); ok { + return rf(file, replace) + } + if rf, ok := ret.Get(0).(func(io.Reader, bool) *model.Manifest); ok { + r0 = rf(file, replace) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Manifest) + } + } + + if rf, ok := ret.Get(1).(func(io.Reader, bool) *model.AppError); ok { + r1 = rf(file, replace) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// InviteRemoteToChannel provides a mock function with given fields: channelID, remoteID, userID, shareIfNotShared +func (_m *API) InviteRemoteToChannel(channelID string, remoteID string, userID string, shareIfNotShared bool) error { + ret := _m.Called(channelID, remoteID, userID, shareIfNotShared) + + if len(ret) == 0 { + panic("no return value specified for InviteRemoteToChannel") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string, string, bool) error); ok { + r0 = 
rf(channelID, remoteID, userID, shareIfNotShared) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// IsEnterpriseReady provides a mock function with no fields +func (_m *API) IsEnterpriseReady() bool { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for IsEnterpriseReady") + } + + var r0 bool + if rf, ok := ret.Get(0).(func() bool); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// KVCompareAndDelete provides a mock function with given fields: key, oldValue +func (_m *API) KVCompareAndDelete(key string, oldValue []byte) (bool, *model.AppError) { + ret := _m.Called(key, oldValue) + + if len(ret) == 0 { + panic("no return value specified for KVCompareAndDelete") + } + + var r0 bool + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []byte) (bool, *model.AppError)); ok { + return rf(key, oldValue) + } + if rf, ok := ret.Get(0).(func(string, []byte) bool); ok { + r0 = rf(key, oldValue) + } else { + r0 = ret.Get(0).(bool) + } + + if rf, ok := ret.Get(1).(func(string, []byte) *model.AppError); ok { + r1 = rf(key, oldValue) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// KVCompareAndSet provides a mock function with given fields: key, oldValue, newValue +func (_m *API) KVCompareAndSet(key string, oldValue []byte, newValue []byte) (bool, *model.AppError) { + ret := _m.Called(key, oldValue, newValue) + + if len(ret) == 0 { + panic("no return value specified for KVCompareAndSet") + } + + var r0 bool + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []byte, []byte) (bool, *model.AppError)); ok { + return rf(key, oldValue, newValue) + } + if rf, ok := ret.Get(0).(func(string, []byte, []byte) bool); ok { + r0 = rf(key, oldValue, newValue) + } else { + r0 = ret.Get(0).(bool) + } + + if rf, ok := ret.Get(1).(func(string, []byte, []byte) *model.AppError); ok { + r1 = rf(key, oldValue, newValue) + } else { + if ret.Get(1) != nil 
{ + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// KVDelete provides a mock function with given fields: key +func (_m *API) KVDelete(key string) *model.AppError { + ret := _m.Called(key) + + if len(ret) == 0 { + panic("no return value specified for KVDelete") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(key) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// KVDeleteAll provides a mock function with no fields +func (_m *API) KVDeleteAll() *model.AppError { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for KVDeleteAll") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func() *model.AppError); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// KVGet provides a mock function with given fields: key +func (_m *API) KVGet(key string) ([]byte, *model.AppError) { + ret := _m.Called(key) + + if len(ret) == 0 { + panic("no return value specified for KVGet") + } + + var r0 []byte + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]byte, *model.AppError)); ok { + return rf(key) + } + if rf, ok := ret.Get(0).(func(string) []byte); ok { + r0 = rf(key) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(key) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// KVList provides a mock function with given fields: page, perPage +func (_m *API) KVList(page int, perPage int) ([]string, *model.AppError) { + ret := _m.Called(page, perPage) + + if len(ret) == 0 { + panic("no return value specified for KVList") + } + + var r0 []string + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(int, int) ([]string, *model.AppError)); ok { + return rf(page, perPage) + } + if rf, ok := 
ret.Get(0).(func(int, int) []string); ok { + r0 = rf(page, perPage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(int, int) *model.AppError); ok { + r1 = rf(page, perPage) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// KVSet provides a mock function with given fields: key, value +func (_m *API) KVSet(key string, value []byte) *model.AppError { + ret := _m.Called(key, value) + + if len(ret) == 0 { + panic("no return value specified for KVSet") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, []byte) *model.AppError); ok { + r0 = rf(key, value) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// KVSetWithExpiry provides a mock function with given fields: key, value, expireInSeconds +func (_m *API) KVSetWithExpiry(key string, value []byte, expireInSeconds int64) *model.AppError { + ret := _m.Called(key, value, expireInSeconds) + + if len(ret) == 0 { + panic("no return value specified for KVSetWithExpiry") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, []byte, int64) *model.AppError); ok { + r0 = rf(key, value, expireInSeconds) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// KVSetWithOptions provides a mock function with given fields: key, value, options +func (_m *API) KVSetWithOptions(key string, value []byte, options model.PluginKVSetOptions) (bool, *model.AppError) { + ret := _m.Called(key, value, options) + + if len(ret) == 0 { + panic("no return value specified for KVSetWithOptions") + } + + var r0 bool + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []byte, model.PluginKVSetOptions) (bool, *model.AppError)); ok { + return rf(key, value, options) + } + if rf, ok := ret.Get(0).(func(string, []byte, model.PluginKVSetOptions) bool); ok { + r0 = rf(key, value, options) + } else { + 
r0 = ret.Get(0).(bool) + } + + if rf, ok := ret.Get(1).(func(string, []byte, model.PluginKVSetOptions) *model.AppError); ok { + r1 = rf(key, value, options) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// ListBuiltInCommands provides a mock function with no fields +func (_m *API) ListBuiltInCommands() ([]*model.Command, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for ListBuiltInCommands") + } + + var r0 []*model.Command + var r1 error + if rf, ok := ret.Get(0).(func() ([]*model.Command, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*model.Command); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Command) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListCommands provides a mock function with given fields: teamID +func (_m *API) ListCommands(teamID string) ([]*model.Command, error) { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for ListCommands") + } + + var r0 []*model.Command + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]*model.Command, error)); ok { + return rf(teamID) + } + if rf, ok := ret.Get(0).(func(string) []*model.Command); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Command) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(teamID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListCustomCommands provides a mock function with given fields: teamID +func (_m *API) ListCustomCommands(teamID string) ([]*model.Command, error) { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for ListCustomCommands") + } + + var r0 []*model.Command + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]*model.Command, error)); ok { + return rf(teamID) + } + if rf, ok := 
ret.Get(0).(func(string) []*model.Command); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Command) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(teamID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListPluginCommands provides a mock function with given fields: teamID +func (_m *API) ListPluginCommands(teamID string) ([]*model.Command, error) { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for ListPluginCommands") + } + + var r0 []*model.Command + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]*model.Command, error)); ok { + return rf(teamID) + } + if rf, ok := ret.Get(0).(func(string) []*model.Command); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Command) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(teamID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// LoadPluginConfiguration provides a mock function with given fields: dest +func (_m *API) LoadPluginConfiguration(dest interface{}) error { + ret := _m.Called(dest) + + if len(ret) == 0 { + panic("no return value specified for LoadPluginConfiguration") + } + + var r0 error + if rf, ok := ret.Get(0).(func(interface{}) error); ok { + r0 = rf(dest) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// LogAuditRec provides a mock function with given fields: rec +func (_m *API) LogAuditRec(rec *model.AuditRecord) { + _m.Called(rec) +} + +// LogAuditRecWithLevel provides a mock function with given fields: rec, level +func (_m *API) LogAuditRecWithLevel(rec *model.AuditRecord, level logr.Level) { + _m.Called(rec, level) +} + +// LogDebug provides a mock function with given fields: msg, keyValuePairs +func (_m *API) LogDebug(msg string, keyValuePairs ...interface{}) { + var _ca []interface{} + _ca = append(_ca, msg) + _ca = append(_ca, keyValuePairs...) + _m.Called(_ca...) 
+} + +// LogError provides a mock function with given fields: msg, keyValuePairs +func (_m *API) LogError(msg string, keyValuePairs ...interface{}) { + var _ca []interface{} + _ca = append(_ca, msg) + _ca = append(_ca, keyValuePairs...) + _m.Called(_ca...) +} + +// LogInfo provides a mock function with given fields: msg, keyValuePairs +func (_m *API) LogInfo(msg string, keyValuePairs ...interface{}) { + var _ca []interface{} + _ca = append(_ca, msg) + _ca = append(_ca, keyValuePairs...) + _m.Called(_ca...) +} + +// LogWarn provides a mock function with given fields: msg, keyValuePairs +func (_m *API) LogWarn(msg string, keyValuePairs ...interface{}) { + var _ca []interface{} + _ca = append(_ca, msg) + _ca = append(_ca, keyValuePairs...) + _m.Called(_ca...) +} + +// OpenInteractiveDialog provides a mock function with given fields: dialog +func (_m *API) OpenInteractiveDialog(dialog model.OpenDialogRequest) *model.AppError { + ret := _m.Called(dialog) + + if len(ret) == 0 { + panic("no return value specified for OpenInteractiveDialog") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(model.OpenDialogRequest) *model.AppError); ok { + r0 = rf(dialog) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// PatchBot provides a mock function with given fields: botUserId, botPatch +func (_m *API) PatchBot(botUserId string, botPatch *model.BotPatch) (*model.Bot, *model.AppError) { + ret := _m.Called(botUserId, botPatch) + + if len(ret) == 0 { + panic("no return value specified for PatchBot") + } + + var r0 *model.Bot + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, *model.BotPatch) (*model.Bot, *model.AppError)); ok { + return rf(botUserId, botPatch) + } + if rf, ok := ret.Get(0).(func(string, *model.BotPatch) *model.Bot); ok { + r0 = rf(botUserId, botPatch) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Bot) + } + } + + if rf, ok := ret.Get(1).(func(string, *model.BotPatch) 
*model.AppError); ok { + r1 = rf(botUserId, botPatch) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// PatchChannelMembersNotifications provides a mock function with given fields: members, notifyProps +func (_m *API) PatchChannelMembersNotifications(members []*model.ChannelMemberIdentifier, notifyProps map[string]string) *model.AppError { + ret := _m.Called(members, notifyProps) + + if len(ret) == 0 { + panic("no return value specified for PatchChannelMembersNotifications") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func([]*model.ChannelMemberIdentifier, map[string]string) *model.AppError); ok { + r0 = rf(members, notifyProps) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// PermanentDeleteBot provides a mock function with given fields: botUserId +func (_m *API) PermanentDeleteBot(botUserId string) *model.AppError { + ret := _m.Called(botUserId) + + if len(ret) == 0 { + panic("no return value specified for PermanentDeleteBot") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(botUserId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// PluginHTTP provides a mock function with given fields: request +func (_m *API) PluginHTTP(request *http.Request) *http.Response { + ret := _m.Called(request) + + if len(ret) == 0 { + panic("no return value specified for PluginHTTP") + } + + var r0 *http.Response + if rf, ok := ret.Get(0).(func(*http.Request) *http.Response); ok { + r0 = rf(request) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*http.Response) + } + } + + return r0 +} + +// PublishPluginClusterEvent provides a mock function with given fields: ev, opts +func (_m *API) PublishPluginClusterEvent(ev model.PluginClusterEvent, opts model.PluginClusterEventSendOptions) error { + ret := _m.Called(ev, opts) + + if len(ret) == 0 { + 
panic("no return value specified for PublishPluginClusterEvent") + } + + var r0 error + if rf, ok := ret.Get(0).(func(model.PluginClusterEvent, model.PluginClusterEventSendOptions) error); ok { + r0 = rf(ev, opts) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// PublishUserTyping provides a mock function with given fields: userID, channelId, parentId +func (_m *API) PublishUserTyping(userID string, channelId string, parentId string) *model.AppError { + ret := _m.Called(userID, channelId, parentId) + + if len(ret) == 0 { + panic("no return value specified for PublishUserTyping") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, string) *model.AppError); ok { + r0 = rf(userID, channelId, parentId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// PublishWebSocketEvent provides a mock function with given fields: event, payload, broadcast +func (_m *API) PublishWebSocketEvent(event string, payload map[string]interface{}, broadcast *model.WebsocketBroadcast) { + _m.Called(event, payload, broadcast) +} + +// ReadFile provides a mock function with given fields: path +func (_m *API) ReadFile(path string) ([]byte, *model.AppError) { + ret := _m.Called(path) + + if len(ret) == 0 { + panic("no return value specified for ReadFile") + } + + var r0 []byte + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]byte, *model.AppError)); ok { + return rf(path) + } + if rf, ok := ret.Get(0).(func(string) []byte); ok { + r0 = rf(path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(path) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// RegisterCollectionAndTopic provides a mock function with given fields: collectionType, topicType +func (_m *API) RegisterCollectionAndTopic(collectionType string, topicType string) error { + ret := 
_m.Called(collectionType, topicType) + + if len(ret) == 0 { + panic("no return value specified for RegisterCollectionAndTopic") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(collectionType, topicType) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RegisterCommand provides a mock function with given fields: command +func (_m *API) RegisterCommand(command *model.Command) error { + ret := _m.Called(command) + + if len(ret) == 0 { + panic("no return value specified for RegisterCommand") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*model.Command) error); ok { + r0 = rf(command) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RegisterPluginForSharedChannels provides a mock function with given fields: opts +func (_m *API) RegisterPluginForSharedChannels(opts model.RegisterPluginOpts) (string, error) { + ret := _m.Called(opts) + + if len(ret) == 0 { + panic("no return value specified for RegisterPluginForSharedChannels") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(model.RegisterPluginOpts) (string, error)); ok { + return rf(opts) + } + if rf, ok := ret.Get(0).(func(model.RegisterPluginOpts) string); ok { + r0 = rf(opts) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(model.RegisterPluginOpts) error); ok { + r1 = rf(opts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RegisterPropertyGroup provides a mock function with given fields: name +func (_m *API) RegisterPropertyGroup(name string) (*model.PropertyGroup, error) { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for RegisterPropertyGroup") + } + + var r0 *model.PropertyGroup + var r1 error + if rf, ok := ret.Get(0).(func(string) (*model.PropertyGroup, error)); ok { + return rf(name) + } + if rf, ok := ret.Get(0).(func(string) *model.PropertyGroup); ok { + r0 = rf(name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyGroup) 
+ } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(name) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RemovePlugin provides a mock function with given fields: id +func (_m *API) RemovePlugin(id string) *model.AppError { + ret := _m.Called(id) + + if len(ret) == 0 { + panic("no return value specified for RemovePlugin") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// RemoveReaction provides a mock function with given fields: reaction +func (_m *API) RemoveReaction(reaction *model.Reaction) *model.AppError { + ret := _m.Called(reaction) + + if len(ret) == 0 { + panic("no return value specified for RemoveReaction") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Reaction) *model.AppError); ok { + r0 = rf(reaction) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// RemoveTeamIcon provides a mock function with given fields: teamID +func (_m *API) RemoveTeamIcon(teamID string) *model.AppError { + ret := _m.Called(teamID) + + if len(ret) == 0 { + panic("no return value specified for RemoveTeamIcon") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(teamID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// RemoveUserCustomStatus provides a mock function with given fields: userID +func (_m *API) RemoveUserCustomStatus(userID string) *model.AppError { + ret := _m.Called(userID) + + if len(ret) == 0 { + panic("no return value specified for RemoveUserCustomStatus") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// RequestTrialLicense 
provides a mock function with given fields: requesterID, users, termsAccepted, receiveEmailsAccepted +func (_m *API) RequestTrialLicense(requesterID string, users int, termsAccepted bool, receiveEmailsAccepted bool) *model.AppError { + ret := _m.Called(requesterID, users, termsAccepted, receiveEmailsAccepted) + + if len(ret) == 0 { + panic("no return value specified for RequestTrialLicense") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, int, bool, bool) *model.AppError); ok { + r0 = rf(requesterID, users, termsAccepted, receiveEmailsAccepted) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// RestoreGroup provides a mock function with given fields: groupID +func (_m *API) RestoreGroup(groupID string) (*model.Group, *model.AppError) { + ret := _m.Called(groupID) + + if len(ret) == 0 { + panic("no return value specified for RestoreGroup") + } + + var r0 *model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) (*model.Group, *model.AppError)); ok { + return rf(groupID) + } + if rf, ok := ret.Get(0).(func(string) *model.Group); ok { + r0 = rf(groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(groupID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// RevokeSession provides a mock function with given fields: sessionID +func (_m *API) RevokeSession(sessionID string) *model.AppError { + ret := _m.Called(sessionID) + + if len(ret) == 0 { + panic("no return value specified for RevokeSession") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(sessionID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// RevokeUserAccessToken provides a mock function with given fields: tokenID +func (_m *API) 
RevokeUserAccessToken(tokenID string) *model.AppError { + ret := _m.Called(tokenID) + + if len(ret) == 0 { + panic("no return value specified for RevokeUserAccessToken") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string) *model.AppError); ok { + r0 = rf(tokenID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// RolesGrantPermission provides a mock function with given fields: roleNames, permissionId +func (_m *API) RolesGrantPermission(roleNames []string, permissionId string) bool { + ret := _m.Called(roleNames, permissionId) + + if len(ret) == 0 { + panic("no return value specified for RolesGrantPermission") + } + + var r0 bool + if rf, ok := ret.Get(0).(func([]string, string) bool); ok { + r0 = rf(roleNames, permissionId) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// SaveConfig provides a mock function with given fields: config +func (_m *API) SaveConfig(config *model.Config) *model.AppError { + ret := _m.Called(config) + + if len(ret) == 0 { + panic("no return value specified for SaveConfig") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Config) *model.AppError); ok { + r0 = rf(config) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// SavePluginConfig provides a mock function with given fields: config +func (_m *API) SavePluginConfig(config map[string]interface{}) *model.AppError { + ret := _m.Called(config) + + if len(ret) == 0 { + panic("no return value specified for SavePluginConfig") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(map[string]interface{}) *model.AppError); ok { + r0 = rf(config) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// SearchChannels provides a mock function with given fields: teamID, term +func (_m *API) SearchChannels(teamID string, term string) ([]*model.Channel, *model.AppError) { + ret := 
_m.Called(teamID, term) + + if len(ret) == 0 { + panic("no return value specified for SearchChannels") + } + + var r0 []*model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) ([]*model.Channel, *model.AppError)); ok { + return rf(teamID, term) + } + if rf, ok := ret.Get(0).(func(string, string) []*model.Channel); ok { + r0 = rf(teamID, term) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(teamID, term) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// SearchPostsInTeam provides a mock function with given fields: teamID, paramsList +func (_m *API) SearchPostsInTeam(teamID string, paramsList []*model.SearchParams) ([]*model.Post, *model.AppError) { + ret := _m.Called(teamID, paramsList) + + if len(ret) == 0 { + panic("no return value specified for SearchPostsInTeam") + } + + var r0 []*model.Post + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []*model.SearchParams) ([]*model.Post, *model.AppError)); ok { + return rf(teamID, paramsList) + } + if rf, ok := ret.Get(0).(func(string, []*model.SearchParams) []*model.Post); ok { + r0 = rf(teamID, paramsList) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Post) + } + } + + if rf, ok := ret.Get(1).(func(string, []*model.SearchParams) *model.AppError); ok { + r1 = rf(teamID, paramsList) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// SearchPostsInTeamForUser provides a mock function with given fields: teamID, userID, searchParams +func (_m *API) SearchPostsInTeamForUser(teamID string, userID string, searchParams model.SearchParameter) (*model.PostSearchResults, *model.AppError) { + ret := _m.Called(teamID, userID, searchParams) + + if len(ret) == 0 { + panic("no return value specified for SearchPostsInTeamForUser") + } + + var r0 
*model.PostSearchResults + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, model.SearchParameter) (*model.PostSearchResults, *model.AppError)); ok { + return rf(teamID, userID, searchParams) + } + if rf, ok := ret.Get(0).(func(string, string, model.SearchParameter) *model.PostSearchResults); ok { + r0 = rf(teamID, userID, searchParams) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PostSearchResults) + } + } + + if rf, ok := ret.Get(1).(func(string, string, model.SearchParameter) *model.AppError); ok { + r1 = rf(teamID, userID, searchParams) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// SearchPropertyFields provides a mock function with given fields: groupID, opts +func (_m *API) SearchPropertyFields(groupID string, opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) { + ret := _m.Called(groupID, opts) + + if len(ret) == 0 { + panic("no return value specified for SearchPropertyFields") + } + + var r0 []*model.PropertyField + var r1 error + if rf, ok := ret.Get(0).(func(string, model.PropertyFieldSearchOpts) ([]*model.PropertyField, error)); ok { + return rf(groupID, opts) + } + if rf, ok := ret.Get(0).(func(string, model.PropertyFieldSearchOpts) []*model.PropertyField); ok { + r0 = rf(groupID, opts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.PropertyField) + } + } + + if rf, ok := ret.Get(1).(func(string, model.PropertyFieldSearchOpts) error); ok { + r1 = rf(groupID, opts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SearchPropertyValues provides a mock function with given fields: groupID, opts +func (_m *API) SearchPropertyValues(groupID string, opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error) { + ret := _m.Called(groupID, opts) + + if len(ret) == 0 { + panic("no return value specified for SearchPropertyValues") + } + + var r0 []*model.PropertyValue + var r1 error + if rf, ok := 
ret.Get(0).(func(string, model.PropertyValueSearchOpts) ([]*model.PropertyValue, error)); ok { + return rf(groupID, opts) + } + if rf, ok := ret.Get(0).(func(string, model.PropertyValueSearchOpts) []*model.PropertyValue); ok { + r0 = rf(groupID, opts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func(string, model.PropertyValueSearchOpts) error); ok { + r1 = rf(groupID, opts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SearchTeams provides a mock function with given fields: term +func (_m *API) SearchTeams(term string) ([]*model.Team, *model.AppError) { + ret := _m.Called(term) + + if len(ret) == 0 { + panic("no return value specified for SearchTeams") + } + + var r0 []*model.Team + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string) ([]*model.Team, *model.AppError)); ok { + return rf(term) + } + if rf, ok := ret.Get(0).(func(string) []*model.Team); ok { + r0 = rf(term) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Team) + } + } + + if rf, ok := ret.Get(1).(func(string) *model.AppError); ok { + r1 = rf(term) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// SearchUsers provides a mock function with given fields: search +func (_m *API) SearchUsers(search *model.UserSearch) ([]*model.User, *model.AppError) { + ret := _m.Called(search) + + if len(ret) == 0 { + panic("no return value specified for SearchUsers") + } + + var r0 []*model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.UserSearch) ([]*model.User, *model.AppError)); ok { + return rf(search) + } + if rf, ok := ret.Get(0).(func(*model.UserSearch) []*model.User); ok { + r0 = rf(search) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.User) + } + } + + if rf, ok := ret.Get(1).(func(*model.UserSearch) *model.AppError); ok { + r1 = rf(search) + } else { + if ret.Get(1) != nil { + r1 = 
ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// SendEphemeralPost provides a mock function with given fields: userID, post +func (_m *API) SendEphemeralPost(userID string, post *model.Post) *model.Post { + ret := _m.Called(userID, post) + + if len(ret) == 0 { + panic("no return value specified for SendEphemeralPost") + } + + var r0 *model.Post + if rf, ok := ret.Get(0).(func(string, *model.Post) *model.Post); ok { + r0 = rf(userID, post) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Post) + } + } + + return r0 +} + +// SendMail provides a mock function with given fields: to, subject, htmlBody +func (_m *API) SendMail(to string, subject string, htmlBody string) *model.AppError { + ret := _m.Called(to, subject, htmlBody) + + if len(ret) == 0 { + panic("no return value specified for SendMail") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, string) *model.AppError); ok { + r0 = rf(to, subject, htmlBody) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// SendPushNotification provides a mock function with given fields: notification, userID +func (_m *API) SendPushNotification(notification *model.PushNotification, userID string) *model.AppError { + ret := _m.Called(notification, userID) + + if len(ret) == 0 { + panic("no return value specified for SendPushNotification") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(*model.PushNotification, string) *model.AppError); ok { + r0 = rf(notification, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// SetFileSearchableContent provides a mock function with given fields: fileID, content +func (_m *API) SetFileSearchableContent(fileID string, content string) *model.AppError { + ret := _m.Called(fileID, content) + + if len(ret) == 0 { + panic("no return value specified for SetFileSearchableContent") + } + + var r0 *model.AppError + if rf, ok := 
ret.Get(0).(func(string, string) *model.AppError); ok { + r0 = rf(fileID, content) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// SetProfileImage provides a mock function with given fields: userID, data +func (_m *API) SetProfileImage(userID string, data []byte) *model.AppError { + ret := _m.Called(userID, data) + + if len(ret) == 0 { + panic("no return value specified for SetProfileImage") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, []byte) *model.AppError); ok { + r0 = rf(userID, data) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// SetTeamIcon provides a mock function with given fields: teamID, data +func (_m *API) SetTeamIcon(teamID string, data []byte) *model.AppError { + ret := _m.Called(teamID, data) + + if len(ret) == 0 { + panic("no return value specified for SetTeamIcon") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, []byte) *model.AppError); ok { + r0 = rf(teamID, data) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// SetUserStatusTimedDND provides a mock function with given fields: userId, endtime +func (_m *API) SetUserStatusTimedDND(userId string, endtime int64) (*model.Status, *model.AppError) { + ret := _m.Called(userId, endtime) + + if len(ret) == 0 { + panic("no return value specified for SetUserStatusTimedDND") + } + + var r0 *model.Status + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, int64) (*model.Status, *model.AppError)); ok { + return rf(userId, endtime) + } + if rf, ok := ret.Get(0).(func(string, int64) *model.Status); ok { + r0 = rf(userId, endtime) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Status) + } + } + + if rf, ok := ret.Get(1).(func(string, int64) *model.AppError); ok { + r1 = rf(userId, endtime) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + 
return r0, r1 +} + +// ShareChannel provides a mock function with given fields: sc +func (_m *API) ShareChannel(sc *model.SharedChannel) (*model.SharedChannel, error) { + ret := _m.Called(sc) + + if len(ret) == 0 { + panic("no return value specified for ShareChannel") + } + + var r0 *model.SharedChannel + var r1 error + if rf, ok := ret.Get(0).(func(*model.SharedChannel) (*model.SharedChannel, error)); ok { + return rf(sc) + } + if rf, ok := ret.Get(0).(func(*model.SharedChannel) *model.SharedChannel); ok { + r0 = rf(sc) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.SharedChannel) + } + } + + if rf, ok := ret.Get(1).(func(*model.SharedChannel) error); ok { + r1 = rf(sc) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SyncSharedChannel provides a mock function with given fields: channelID +func (_m *API) SyncSharedChannel(channelID string) error { + ret := _m.Called(channelID) + + if len(ret) == 0 { + panic("no return value specified for SyncSharedChannel") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(channelID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UninviteRemoteFromChannel provides a mock function with given fields: channelID, remoteID +func (_m *API) UninviteRemoteFromChannel(channelID string, remoteID string) error { + ret := _m.Called(channelID, remoteID) + + if len(ret) == 0 { + panic("no return value specified for UninviteRemoteFromChannel") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(channelID, remoteID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UnregisterCommand provides a mock function with given fields: teamID, trigger +func (_m *API) UnregisterCommand(teamID string, trigger string) error { + ret := _m.Called(teamID, trigger) + + if len(ret) == 0 { + panic("no return value specified for UnregisterCommand") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(teamID, 
trigger) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UnregisterPluginForSharedChannels provides a mock function with given fields: pluginID +func (_m *API) UnregisterPluginForSharedChannels(pluginID string) error { + ret := _m.Called(pluginID) + + if len(ret) == 0 { + panic("no return value specified for UnregisterPluginForSharedChannels") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(pluginID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UnshareChannel provides a mock function with given fields: channelID +func (_m *API) UnshareChannel(channelID string) (bool, error) { + ret := _m.Called(channelID) + + if len(ret) == 0 { + panic("no return value specified for UnshareChannel") + } + + var r0 bool + var r1 error + if rf, ok := ret.Get(0).(func(string) (bool, error)); ok { + return rf(channelID) + } + if rf, ok := ret.Get(0).(func(string) bool); ok { + r0 = rf(channelID) + } else { + r0 = ret.Get(0).(bool) + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(channelID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateBotActive provides a mock function with given fields: botUserId, active +func (_m *API) UpdateBotActive(botUserId string, active bool) (*model.Bot, *model.AppError) { + ret := _m.Called(botUserId, active) + + if len(ret) == 0 { + panic("no return value specified for UpdateBotActive") + } + + var r0 *model.Bot + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, bool) (*model.Bot, *model.AppError)); ok { + return rf(botUserId, active) + } + if rf, ok := ret.Get(0).(func(string, bool) *model.Bot); ok { + r0 = rf(botUserId, active) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Bot) + } + } + + if rf, ok := ret.Get(1).(func(string, bool) *model.AppError); ok { + r1 = rf(botUserId, active) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateChannel provides a mock function 
with given fields: channel +func (_m *API) UpdateChannel(channel *model.Channel) (*model.Channel, *model.AppError) { + ret := _m.Called(channel) + + if len(ret) == 0 { + panic("no return value specified for UpdateChannel") + } + + var r0 *model.Channel + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Channel) (*model.Channel, *model.AppError)); ok { + return rf(channel) + } + if rf, ok := ret.Get(0).(func(*model.Channel) *model.Channel); ok { + r0 = rf(channel) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Channel) + } + } + + if rf, ok := ret.Get(1).(func(*model.Channel) *model.AppError); ok { + r1 = rf(channel) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateChannelMemberNotifications provides a mock function with given fields: channelId, userID, notifications +func (_m *API) UpdateChannelMemberNotifications(channelId string, userID string, notifications map[string]string) (*model.ChannelMember, *model.AppError) { + ret := _m.Called(channelId, userID, notifications) + + if len(ret) == 0 { + panic("no return value specified for UpdateChannelMemberNotifications") + } + + var r0 *model.ChannelMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, map[string]string) (*model.ChannelMember, *model.AppError)); ok { + return rf(channelId, userID, notifications) + } + if rf, ok := ret.Get(0).(func(string, string, map[string]string) *model.ChannelMember); ok { + r0 = rf(channelId, userID, notifications) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.ChannelMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string, map[string]string) *model.AppError); ok { + r1 = rf(channelId, userID, notifications) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateChannelMemberRoles provides a mock function with given fields: channelId, userID, newRoles +func (_m *API) 
UpdateChannelMemberRoles(channelId string, userID string, newRoles string) (*model.ChannelMember, *model.AppError) { + ret := _m.Called(channelId, userID, newRoles) + + if len(ret) == 0 { + panic("no return value specified for UpdateChannelMemberRoles") + } + + var r0 *model.ChannelMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, string) (*model.ChannelMember, *model.AppError)); ok { + return rf(channelId, userID, newRoles) + } + if rf, ok := ret.Get(0).(func(string, string, string) *model.ChannelMember); ok { + r0 = rf(channelId, userID, newRoles) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.ChannelMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string, string) *model.AppError); ok { + r1 = rf(channelId, userID, newRoles) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateChannelSidebarCategories provides a mock function with given fields: userID, teamID, categories +func (_m *API) UpdateChannelSidebarCategories(userID string, teamID string, categories []*model.SidebarCategoryWithChannels) ([]*model.SidebarCategoryWithChannels, *model.AppError) { + ret := _m.Called(userID, teamID, categories) + + if len(ret) == 0 { + panic("no return value specified for UpdateChannelSidebarCategories") + } + + var r0 []*model.SidebarCategoryWithChannels + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, []*model.SidebarCategoryWithChannels) ([]*model.SidebarCategoryWithChannels, *model.AppError)); ok { + return rf(userID, teamID, categories) + } + if rf, ok := ret.Get(0).(func(string, string, []*model.SidebarCategoryWithChannels) []*model.SidebarCategoryWithChannels); ok { + r0 = rf(userID, teamID, categories) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.SidebarCategoryWithChannels) + } + } + + if rf, ok := ret.Get(1).(func(string, string, []*model.SidebarCategoryWithChannels) *model.AppError); ok { + r1 = rf(userID, 
teamID, categories) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateCommand provides a mock function with given fields: commandID, updatedCmd +func (_m *API) UpdateCommand(commandID string, updatedCmd *model.Command) (*model.Command, error) { + ret := _m.Called(commandID, updatedCmd) + + if len(ret) == 0 { + panic("no return value specified for UpdateCommand") + } + + var r0 *model.Command + var r1 error + if rf, ok := ret.Get(0).(func(string, *model.Command) (*model.Command, error)); ok { + return rf(commandID, updatedCmd) + } + if rf, ok := ret.Get(0).(func(string, *model.Command) *model.Command); ok { + r0 = rf(commandID, updatedCmd) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Command) + } + } + + if rf, ok := ret.Get(1).(func(string, *model.Command) error); ok { + r1 = rf(commandID, updatedCmd) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateEphemeralPost provides a mock function with given fields: userID, post +func (_m *API) UpdateEphemeralPost(userID string, post *model.Post) *model.Post { + ret := _m.Called(userID, post) + + if len(ret) == 0 { + panic("no return value specified for UpdateEphemeralPost") + } + + var r0 *model.Post + if rf, ok := ret.Get(0).(func(string, *model.Post) *model.Post); ok { + r0 = rf(userID, post) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Post) + } + } + + return r0 +} + +// UpdateGroup provides a mock function with given fields: group +func (_m *API) UpdateGroup(group *model.Group) (*model.Group, *model.AppError) { + ret := _m.Called(group) + + if len(ret) == 0 { + panic("no return value specified for UpdateGroup") + } + + var r0 *model.Group + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Group) (*model.Group, *model.AppError)); ok { + return rf(group) + } + if rf, ok := ret.Get(0).(func(*model.Group) *model.Group); ok { + r0 = rf(group) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(*model.Group) + } + } + + if rf, ok := ret.Get(1).(func(*model.Group) *model.AppError); ok { + r1 = rf(group) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateGroupSyncable provides a mock function with given fields: groupSyncable +func (_m *API) UpdateGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + ret := _m.Called(groupSyncable) + + if len(ret) == 0 { + panic("no return value specified for UpdateGroupSyncable") + } + + var r0 *model.GroupSyncable + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.GroupSyncable) (*model.GroupSyncable, *model.AppError)); ok { + return rf(groupSyncable) + } + if rf, ok := ret.Get(0).(func(*model.GroupSyncable) *model.GroupSyncable); ok { + r0 = rf(groupSyncable) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.GroupSyncable) + } + } + + if rf, ok := ret.Get(1).(func(*model.GroupSyncable) *model.AppError); ok { + r1 = rf(groupSyncable) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateOAuthApp provides a mock function with given fields: app +func (_m *API) UpdateOAuthApp(app *model.OAuthApp) (*model.OAuthApp, *model.AppError) { + ret := _m.Called(app) + + if len(ret) == 0 { + panic("no return value specified for UpdateOAuthApp") + } + + var r0 *model.OAuthApp + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.OAuthApp) (*model.OAuthApp, *model.AppError)); ok { + return rf(app) + } + if rf, ok := ret.Get(0).(func(*model.OAuthApp) *model.OAuthApp); ok { + r0 = rf(app) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.OAuthApp) + } + } + + if rf, ok := ret.Get(1).(func(*model.OAuthApp) *model.AppError); ok { + r1 = rf(app) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdatePost provides a mock function with given fields: post +func (_m *API) 
UpdatePost(post *model.Post) (*model.Post, *model.AppError) { + ret := _m.Called(post) + + if len(ret) == 0 { + panic("no return value specified for UpdatePost") + } + + var r0 *model.Post + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Post) (*model.Post, *model.AppError)); ok { + return rf(post) + } + if rf, ok := ret.Get(0).(func(*model.Post) *model.Post); ok { + r0 = rf(post) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Post) + } + } + + if rf, ok := ret.Get(1).(func(*model.Post) *model.AppError); ok { + r1 = rf(post) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdatePreferencesForUser provides a mock function with given fields: userID, preferences +func (_m *API) UpdatePreferencesForUser(userID string, preferences []model.Preference) *model.AppError { + ret := _m.Called(userID, preferences) + + if len(ret) == 0 { + panic("no return value specified for UpdatePreferencesForUser") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, []model.Preference) *model.AppError); ok { + r0 = rf(userID, preferences) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// UpdatePropertyField provides a mock function with given fields: groupID, field +func (_m *API) UpdatePropertyField(groupID string, field *model.PropertyField) (*model.PropertyField, error) { + ret := _m.Called(groupID, field) + + if len(ret) == 0 { + panic("no return value specified for UpdatePropertyField") + } + + var r0 *model.PropertyField + var r1 error + if rf, ok := ret.Get(0).(func(string, *model.PropertyField) (*model.PropertyField, error)); ok { + return rf(groupID, field) + } + if rf, ok := ret.Get(0).(func(string, *model.PropertyField) *model.PropertyField); ok { + r0 = rf(groupID, field) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyField) + } + } + + if rf, ok := ret.Get(1).(func(string, *model.PropertyField) 
error); ok { + r1 = rf(groupID, field) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdatePropertyFields provides a mock function with given fields: groupID, fields +func (_m *API) UpdatePropertyFields(groupID string, fields []*model.PropertyField) ([]*model.PropertyField, error) { + ret := _m.Called(groupID, fields) + + if len(ret) == 0 { + panic("no return value specified for UpdatePropertyFields") + } + + var r0 []*model.PropertyField + var r1 error + if rf, ok := ret.Get(0).(func(string, []*model.PropertyField) ([]*model.PropertyField, error)); ok { + return rf(groupID, fields) + } + if rf, ok := ret.Get(0).(func(string, []*model.PropertyField) []*model.PropertyField); ok { + r0 = rf(groupID, fields) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.PropertyField) + } + } + + if rf, ok := ret.Get(1).(func(string, []*model.PropertyField) error); ok { + r1 = rf(groupID, fields) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdatePropertyValue provides a mock function with given fields: groupID, value +func (_m *API) UpdatePropertyValue(groupID string, value *model.PropertyValue) (*model.PropertyValue, error) { + ret := _m.Called(groupID, value) + + if len(ret) == 0 { + panic("no return value specified for UpdatePropertyValue") + } + + var r0 *model.PropertyValue + var r1 error + if rf, ok := ret.Get(0).(func(string, *model.PropertyValue) (*model.PropertyValue, error)); ok { + return rf(groupID, value) + } + if rf, ok := ret.Get(0).(func(string, *model.PropertyValue) *model.PropertyValue); ok { + r0 = rf(groupID, value) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func(string, *model.PropertyValue) error); ok { + r1 = rf(groupID, value) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdatePropertyValues provides a mock function with given fields: groupID, values +func (_m *API) UpdatePropertyValues(groupID string, values 
[]*model.PropertyValue) ([]*model.PropertyValue, error) { + ret := _m.Called(groupID, values) + + if len(ret) == 0 { + panic("no return value specified for UpdatePropertyValues") + } + + var r0 []*model.PropertyValue + var r1 error + if rf, ok := ret.Get(0).(func(string, []*model.PropertyValue) ([]*model.PropertyValue, error)); ok { + return rf(groupID, values) + } + if rf, ok := ret.Get(0).(func(string, []*model.PropertyValue) []*model.PropertyValue); ok { + r0 = rf(groupID, values) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func(string, []*model.PropertyValue) error); ok { + r1 = rf(groupID, values) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateSharedChannel provides a mock function with given fields: sc +func (_m *API) UpdateSharedChannel(sc *model.SharedChannel) (*model.SharedChannel, error) { + ret := _m.Called(sc) + + if len(ret) == 0 { + panic("no return value specified for UpdateSharedChannel") + } + + var r0 *model.SharedChannel + var r1 error + if rf, ok := ret.Get(0).(func(*model.SharedChannel) (*model.SharedChannel, error)); ok { + return rf(sc) + } + if rf, ok := ret.Get(0).(func(*model.SharedChannel) *model.SharedChannel); ok { + r0 = rf(sc) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.SharedChannel) + } + } + + if rf, ok := ret.Get(1).(func(*model.SharedChannel) error); ok { + r1 = rf(sc) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateSharedChannelCursor provides a mock function with given fields: channelID, remoteID, cusror +func (_m *API) UpdateSharedChannelCursor(channelID string, remoteID string, cusror model.GetPostsSinceForSyncCursor) error { + ret := _m.Called(channelID, remoteID, cusror) + + if len(ret) == 0 { + panic("no return value specified for UpdateSharedChannelCursor") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string, model.GetPostsSinceForSyncCursor) error); ok { + r0 = rf(channelID, 
remoteID, cusror) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateTeam provides a mock function with given fields: team +func (_m *API) UpdateTeam(team *model.Team) (*model.Team, *model.AppError) { + ret := _m.Called(team) + + if len(ret) == 0 { + panic("no return value specified for UpdateTeam") + } + + var r0 *model.Team + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.Team) (*model.Team, *model.AppError)); ok { + return rf(team) + } + if rf, ok := ret.Get(0).(func(*model.Team) *model.Team); ok { + r0 = rf(team) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Team) + } + } + + if rf, ok := ret.Get(1).(func(*model.Team) *model.AppError); ok { + r1 = rf(team) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateTeamMemberRoles provides a mock function with given fields: teamID, userID, newRoles +func (_m *API) UpdateTeamMemberRoles(teamID string, userID string, newRoles string) (*model.TeamMember, *model.AppError) { + ret := _m.Called(teamID, userID, newRoles) + + if len(ret) == 0 { + panic("no return value specified for UpdateTeamMemberRoles") + } + + var r0 *model.TeamMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string, string) (*model.TeamMember, *model.AppError)); ok { + return rf(teamID, userID, newRoles) + } + if rf, ok := ret.Get(0).(func(string, string, string) *model.TeamMember); ok { + r0 = rf(teamID, userID, newRoles) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.TeamMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string, string) *model.AppError); ok { + r1 = rf(teamID, userID, newRoles) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateUser provides a mock function with given fields: user +func (_m *API) UpdateUser(user *model.User) (*model.User, *model.AppError) { + ret := _m.Called(user) + + if len(ret) == 0 { + panic("no return value 
specified for UpdateUser") + } + + var r0 *model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.User) (*model.User, *model.AppError)); ok { + return rf(user) + } + if rf, ok := ret.Get(0).(func(*model.User) *model.User); ok { + r0 = rf(user) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.User) + } + } + + if rf, ok := ret.Get(1).(func(*model.User) *model.AppError); ok { + r1 = rf(user) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateUserActive provides a mock function with given fields: userID, active +func (_m *API) UpdateUserActive(userID string, active bool) *model.AppError { + ret := _m.Called(userID, active) + + if len(ret) == 0 { + panic("no return value specified for UpdateUserActive") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, bool) *model.AppError); ok { + r0 = rf(userID, active) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// UpdateUserAuth provides a mock function with given fields: userID, userAuth +func (_m *API) UpdateUserAuth(userID string, userAuth *model.UserAuth) (*model.UserAuth, *model.AppError) { + ret := _m.Called(userID, userAuth) + + if len(ret) == 0 { + panic("no return value specified for UpdateUserAuth") + } + + var r0 *model.UserAuth + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, *model.UserAuth) (*model.UserAuth, *model.AppError)); ok { + return rf(userID, userAuth) + } + if rf, ok := ret.Get(0).(func(string, *model.UserAuth) *model.UserAuth); ok { + r0 = rf(userID, userAuth) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.UserAuth) + } + } + + if rf, ok := ret.Get(1).(func(string, *model.UserAuth) *model.AppError); ok { + r1 = rf(userID, userAuth) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateUserCustomStatus provides a mock function with given fields: 
userID, customStatus +func (_m *API) UpdateUserCustomStatus(userID string, customStatus *model.CustomStatus) *model.AppError { + ret := _m.Called(userID, customStatus) + + if len(ret) == 0 { + panic("no return value specified for UpdateUserCustomStatus") + } + + var r0 *model.AppError + if rf, ok := ret.Get(0).(func(string, *model.CustomStatus) *model.AppError); ok { + r0 = rf(userID, customStatus) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.AppError) + } + } + + return r0 +} + +// UpdateUserRoles provides a mock function with given fields: userID, newRoles +func (_m *API) UpdateUserRoles(userID string, newRoles string) (*model.User, *model.AppError) { + ret := _m.Called(userID, newRoles) + + if len(ret) == 0 { + panic("no return value specified for UpdateUserRoles") + } + + var r0 *model.User + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.User, *model.AppError)); ok { + return rf(userID, newRoles) + } + if rf, ok := ret.Get(0).(func(string, string) *model.User); ok { + r0 = rf(userID, newRoles) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.User) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(userID, newRoles) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpdateUserStatus provides a mock function with given fields: userID, status +func (_m *API) UpdateUserStatus(userID string, status string) (*model.Status, *model.AppError) { + ret := _m.Called(userID, status) + + if len(ret) == 0 { + panic("no return value specified for UpdateUserStatus") + } + + var r0 *model.Status + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.Status, *model.AppError)); ok { + return rf(userID, status) + } + if rf, ok := ret.Get(0).(func(string, string) *model.Status); ok { + r0 = rf(userID, status) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Status) + } + } + + if rf, ok 
:= ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(userID, status) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UploadData provides a mock function with given fields: us, rd +func (_m *API) UploadData(us *model.UploadSession, rd io.Reader) (*model.FileInfo, error) { + ret := _m.Called(us, rd) + + if len(ret) == 0 { + panic("no return value specified for UploadData") + } + + var r0 *model.FileInfo + var r1 error + if rf, ok := ret.Get(0).(func(*model.UploadSession, io.Reader) (*model.FileInfo, error)); ok { + return rf(us, rd) + } + if rf, ok := ret.Get(0).(func(*model.UploadSession, io.Reader) *model.FileInfo); ok { + r0 = rf(us, rd) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.FileInfo) + } + } + + if rf, ok := ret.Get(1).(func(*model.UploadSession, io.Reader) error); ok { + r1 = rf(us, rd) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UploadFile provides a mock function with given fields: data, channelId, filename +func (_m *API) UploadFile(data []byte, channelId string, filename string) (*model.FileInfo, *model.AppError) { + ret := _m.Called(data, channelId, filename) + + if len(ret) == 0 { + panic("no return value specified for UploadFile") + } + + var r0 *model.FileInfo + var r1 *model.AppError + if rf, ok := ret.Get(0).(func([]byte, string, string) (*model.FileInfo, *model.AppError)); ok { + return rf(data, channelId, filename) + } + if rf, ok := ret.Get(0).(func([]byte, string, string) *model.FileInfo); ok { + r0 = rf(data, channelId, filename) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.FileInfo) + } + } + + if rf, ok := ret.Get(1).(func([]byte, string, string) *model.AppError); ok { + r1 = rf(data, channelId, filename) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpsertGroupMember provides a mock function with given fields: groupID, userID +func (_m *API) 
UpsertGroupMember(groupID string, userID string) (*model.GroupMember, *model.AppError) { + ret := _m.Called(groupID, userID) + + if len(ret) == 0 { + panic("no return value specified for UpsertGroupMember") + } + + var r0 *model.GroupMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, string) (*model.GroupMember, *model.AppError)); ok { + return rf(groupID, userID) + } + if rf, ok := ret.Get(0).(func(string, string) *model.GroupMember); ok { + r0 = rf(groupID, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.GroupMember) + } + } + + if rf, ok := ret.Get(1).(func(string, string) *model.AppError); ok { + r1 = rf(groupID, userID) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpsertGroupMembers provides a mock function with given fields: groupID, userIDs +func (_m *API) UpsertGroupMembers(groupID string, userIDs []string) ([]*model.GroupMember, *model.AppError) { + ret := _m.Called(groupID, userIDs) + + if len(ret) == 0 { + panic("no return value specified for UpsertGroupMembers") + } + + var r0 []*model.GroupMember + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(string, []string) ([]*model.GroupMember, *model.AppError)); ok { + return rf(groupID, userIDs) + } + if rf, ok := ret.Get(0).(func(string, []string) []*model.GroupMember); ok { + r0 = rf(groupID, userIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.GroupMember) + } + } + + if rf, ok := ret.Get(1).(func(string, []string) *model.AppError); ok { + r1 = rf(groupID, userIDs) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpsertGroupSyncable provides a mock function with given fields: groupSyncable +func (_m *API) UpsertGroupSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, *model.AppError) { + ret := _m.Called(groupSyncable) + + if len(ret) == 0 { + panic("no return value specified for UpsertGroupSyncable") + 
} + + var r0 *model.GroupSyncable + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*model.GroupSyncable) (*model.GroupSyncable, *model.AppError)); ok { + return rf(groupSyncable) + } + if rf, ok := ret.Get(0).(func(*model.GroupSyncable) *model.GroupSyncable); ok { + r0 = rf(groupSyncable) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.GroupSyncable) + } + } + + if rf, ok := ret.Get(1).(func(*model.GroupSyncable) *model.AppError); ok { + r1 = rf(groupSyncable) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// UpsertPropertyValue provides a mock function with given fields: value +func (_m *API) UpsertPropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) { + ret := _m.Called(value) + + if len(ret) == 0 { + panic("no return value specified for UpsertPropertyValue") + } + + var r0 *model.PropertyValue + var r1 error + if rf, ok := ret.Get(0).(func(*model.PropertyValue) (*model.PropertyValue, error)); ok { + return rf(value) + } + if rf, ok := ret.Get(0).(func(*model.PropertyValue) *model.PropertyValue); ok { + r0 = rf(value) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func(*model.PropertyValue) error); ok { + r1 = rf(value) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpsertPropertyValues provides a mock function with given fields: values +func (_m *API) UpsertPropertyValues(values []*model.PropertyValue) ([]*model.PropertyValue, error) { + ret := _m.Called(values) + + if len(ret) == 0 { + panic("no return value specified for UpsertPropertyValues") + } + + var r0 []*model.PropertyValue + var r1 error + if rf, ok := ret.Get(0).(func([]*model.PropertyValue) ([]*model.PropertyValue, error)); ok { + return rf(values) + } + if rf, ok := ret.Get(0).(func([]*model.PropertyValue) []*model.PropertyValue); ok { + r0 = rf(values) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).([]*model.PropertyValue) + } + } + + if rf, ok := ret.Get(1).(func([]*model.PropertyValue) error); ok { + r1 = rf(values) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewAPI creates a new instance of API. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewAPI(t interface { + mock.TestingT + Cleanup(func()) +}) *API { + mock := &API{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/doc.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/doc.go new file mode 100644 index 00000000..d43fbe0e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/doc.go @@ -0,0 +1,11 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// The plugintest package provides mocks that can be used to test plugins. +// +// The mocks are created using testify's mock package: +// https://godoc.org/github.com/stretchr/testify/mock +// +// If you need to import the mock package, you can import it with +// "github.com/mattermost/mattermost/server/public/plugin/plugintest/mock". +package plugintest diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/driver.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/driver.go new file mode 100644 index 00000000..4c158856 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/driver.go @@ -0,0 +1,481 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. + +// Regenerate this file using `make plugin-mocks`. 
+ +package plugintest + +import ( + driver "database/sql/driver" + + mock "github.com/stretchr/testify/mock" + + plugin "github.com/mattermost/mattermost/server/public/plugin" +) + +// Driver is an autogenerated mock type for the Driver type +type Driver struct { + mock.Mock +} + +// Conn provides a mock function with given fields: isMaster +func (_m *Driver) Conn(isMaster bool) (string, error) { + ret := _m.Called(isMaster) + + if len(ret) == 0 { + panic("no return value specified for Conn") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(bool) (string, error)); ok { + return rf(isMaster) + } + if rf, ok := ret.Get(0).(func(bool) string); ok { + r0 = rf(isMaster) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(bool) error); ok { + r1 = rf(isMaster) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ConnClose provides a mock function with given fields: connID +func (_m *Driver) ConnClose(connID string) error { + ret := _m.Called(connID) + + if len(ret) == 0 { + panic("no return value specified for ConnClose") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(connID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ConnExec provides a mock function with given fields: connID, q, args +func (_m *Driver) ConnExec(connID string, q string, args []driver.NamedValue) (plugin.ResultContainer, error) { + ret := _m.Called(connID, q, args) + + if len(ret) == 0 { + panic("no return value specified for ConnExec") + } + + var r0 plugin.ResultContainer + var r1 error + if rf, ok := ret.Get(0).(func(string, string, []driver.NamedValue) (plugin.ResultContainer, error)); ok { + return rf(connID, q, args) + } + if rf, ok := ret.Get(0).(func(string, string, []driver.NamedValue) plugin.ResultContainer); ok { + r0 = rf(connID, q, args) + } else { + r0 = ret.Get(0).(plugin.ResultContainer) + } + + if rf, ok := ret.Get(1).(func(string, string, []driver.NamedValue) error); ok { + r1 = 
rf(connID, q, args) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ConnPing provides a mock function with given fields: connID +func (_m *Driver) ConnPing(connID string) error { + ret := _m.Called(connID) + + if len(ret) == 0 { + panic("no return value specified for ConnPing") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(connID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ConnQuery provides a mock function with given fields: connID, q, args +func (_m *Driver) ConnQuery(connID string, q string, args []driver.NamedValue) (string, error) { + ret := _m.Called(connID, q, args) + + if len(ret) == 0 { + panic("no return value specified for ConnQuery") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string, string, []driver.NamedValue) (string, error)); ok { + return rf(connID, q, args) + } + if rf, ok := ret.Get(0).(func(string, string, []driver.NamedValue) string); ok { + r0 = rf(connID, q, args) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string, string, []driver.NamedValue) error); ok { + r1 = rf(connID, q, args) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RowsClose provides a mock function with given fields: rowsID +func (_m *Driver) RowsClose(rowsID string) error { + ret := _m.Called(rowsID) + + if len(ret) == 0 { + panic("no return value specified for RowsClose") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(rowsID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RowsColumnTypeDatabaseTypeName provides a mock function with given fields: rowsID, index +func (_m *Driver) RowsColumnTypeDatabaseTypeName(rowsID string, index int) string { + ret := _m.Called(rowsID, index) + + if len(ret) == 0 { + panic("no return value specified for RowsColumnTypeDatabaseTypeName") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string, int) string); ok { + r0 = rf(rowsID, index) + } else { + r0 = 
ret.Get(0).(string) + } + + return r0 +} + +// RowsColumnTypePrecisionScale provides a mock function with given fields: rowsID, index +func (_m *Driver) RowsColumnTypePrecisionScale(rowsID string, index int) (int64, int64, bool) { + ret := _m.Called(rowsID, index) + + if len(ret) == 0 { + panic("no return value specified for RowsColumnTypePrecisionScale") + } + + var r0 int64 + var r1 int64 + var r2 bool + if rf, ok := ret.Get(0).(func(string, int) (int64, int64, bool)); ok { + return rf(rowsID, index) + } + if rf, ok := ret.Get(0).(func(string, int) int64); ok { + r0 = rf(rowsID, index) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(string, int) int64); ok { + r1 = rf(rowsID, index) + } else { + r1 = ret.Get(1).(int64) + } + + if rf, ok := ret.Get(2).(func(string, int) bool); ok { + r2 = rf(rowsID, index) + } else { + r2 = ret.Get(2).(bool) + } + + return r0, r1, r2 +} + +// RowsColumns provides a mock function with given fields: rowsID +func (_m *Driver) RowsColumns(rowsID string) []string { + ret := _m.Called(rowsID) + + if len(ret) == 0 { + panic("no return value specified for RowsColumns") + } + + var r0 []string + if rf, ok := ret.Get(0).(func(string) []string); ok { + r0 = rf(rowsID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + return r0 +} + +// RowsHasNextResultSet provides a mock function with given fields: rowsID +func (_m *Driver) RowsHasNextResultSet(rowsID string) bool { + ret := _m.Called(rowsID) + + if len(ret) == 0 { + panic("no return value specified for RowsHasNextResultSet") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(string) bool); ok { + r0 = rf(rowsID) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// RowsNext provides a mock function with given fields: rowsID, dest +func (_m *Driver) RowsNext(rowsID string, dest []driver.Value) error { + ret := _m.Called(rowsID, dest) + + if len(ret) == 0 { + panic("no return value specified for RowsNext") + } + + var r0 error + 
if rf, ok := ret.Get(0).(func(string, []driver.Value) error); ok { + r0 = rf(rowsID, dest) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RowsNextResultSet provides a mock function with given fields: rowsID +func (_m *Driver) RowsNextResultSet(rowsID string) error { + ret := _m.Called(rowsID) + + if len(ret) == 0 { + panic("no return value specified for RowsNextResultSet") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(rowsID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Stmt provides a mock function with given fields: connID, q +func (_m *Driver) Stmt(connID string, q string) (string, error) { + ret := _m.Called(connID, q) + + if len(ret) == 0 { + panic("no return value specified for Stmt") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (string, error)); ok { + return rf(connID, q) + } + if rf, ok := ret.Get(0).(func(string, string) string); ok { + r0 = rf(connID, q) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(connID, q) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// StmtClose provides a mock function with given fields: stID +func (_m *Driver) StmtClose(stID string) error { + ret := _m.Called(stID) + + if len(ret) == 0 { + panic("no return value specified for StmtClose") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(stID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// StmtExec provides a mock function with given fields: stID, args +func (_m *Driver) StmtExec(stID string, args []driver.NamedValue) (plugin.ResultContainer, error) { + ret := _m.Called(stID, args) + + if len(ret) == 0 { + panic("no return value specified for StmtExec") + } + + var r0 plugin.ResultContainer + var r1 error + if rf, ok := ret.Get(0).(func(string, []driver.NamedValue) (plugin.ResultContainer, error)); ok { + return rf(stID, args) + } + if rf, ok := 
ret.Get(0).(func(string, []driver.NamedValue) plugin.ResultContainer); ok { + r0 = rf(stID, args) + } else { + r0 = ret.Get(0).(plugin.ResultContainer) + } + + if rf, ok := ret.Get(1).(func(string, []driver.NamedValue) error); ok { + r1 = rf(stID, args) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// StmtNumInput provides a mock function with given fields: stID +func (_m *Driver) StmtNumInput(stID string) int { + ret := _m.Called(stID) + + if len(ret) == 0 { + panic("no return value specified for StmtNumInput") + } + + var r0 int + if rf, ok := ret.Get(0).(func(string) int); ok { + r0 = rf(stID) + } else { + r0 = ret.Get(0).(int) + } + + return r0 +} + +// StmtQuery provides a mock function with given fields: stID, args +func (_m *Driver) StmtQuery(stID string, args []driver.NamedValue) (string, error) { + ret := _m.Called(stID, args) + + if len(ret) == 0 { + panic("no return value specified for StmtQuery") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string, []driver.NamedValue) (string, error)); ok { + return rf(stID, args) + } + if rf, ok := ret.Get(0).(func(string, []driver.NamedValue) string); ok { + r0 = rf(stID, args) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string, []driver.NamedValue) error); ok { + r1 = rf(stID, args) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Tx provides a mock function with given fields: connID, opts +func (_m *Driver) Tx(connID string, opts driver.TxOptions) (string, error) { + ret := _m.Called(connID, opts) + + if len(ret) == 0 { + panic("no return value specified for Tx") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string, driver.TxOptions) (string, error)); ok { + return rf(connID, opts) + } + if rf, ok := ret.Get(0).(func(string, driver.TxOptions) string); ok { + r0 = rf(connID, opts) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string, driver.TxOptions) error); ok { + r1 = rf(connID, 
opts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// TxCommit provides a mock function with given fields: txID +func (_m *Driver) TxCommit(txID string) error { + ret := _m.Called(txID) + + if len(ret) == 0 { + panic("no return value specified for TxCommit") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(txID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// TxRollback provides a mock function with given fields: txID +func (_m *Driver) TxRollback(txID string) error { + ret := _m.Called(txID) + + if len(ret) == 0 { + panic("no return value specified for TxRollback") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(txID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewDriver creates a new instance of Driver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewDriver(t interface { + mock.TestingT + Cleanup(func()) +}) *Driver { + mock := &Driver{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/hooks.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/hooks.go new file mode 100644 index 00000000..97b42da8 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/hooks.go @@ -0,0 +1,657 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. + +// Regenerate this file using `make plugin-mocks`. 
+ +package plugintest + +import ( + io "io" + http "net/http" + + mock "github.com/stretchr/testify/mock" + + model "github.com/mattermost/mattermost/server/public/model" + + plugin "github.com/mattermost/mattermost/server/public/plugin" + + saml2 "github.com/mattermost/gosaml2" +) + +// Hooks is an autogenerated mock type for the Hooks type +type Hooks struct { + mock.Mock +} + +// ChannelHasBeenCreated provides a mock function with given fields: c, channel +func (_m *Hooks) ChannelHasBeenCreated(c *plugin.Context, channel *model.Channel) { + _m.Called(c, channel) +} + +// ConfigurationWillBeSaved provides a mock function with given fields: newCfg +func (_m *Hooks) ConfigurationWillBeSaved(newCfg *model.Config) (*model.Config, error) { + ret := _m.Called(newCfg) + + if len(ret) == 0 { + panic("no return value specified for ConfigurationWillBeSaved") + } + + var r0 *model.Config + var r1 error + if rf, ok := ret.Get(0).(func(*model.Config) (*model.Config, error)); ok { + return rf(newCfg) + } + if rf, ok := ret.Get(0).(func(*model.Config) *model.Config); ok { + r0 = rf(newCfg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Config) + } + } + + if rf, ok := ret.Get(1).(func(*model.Config) error); ok { + r1 = rf(newCfg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// EmailNotificationWillBeSent provides a mock function with given fields: emailNotification +func (_m *Hooks) EmailNotificationWillBeSent(emailNotification *model.EmailNotification) (*model.EmailNotificationContent, string) { + ret := _m.Called(emailNotification) + + if len(ret) == 0 { + panic("no return value specified for EmailNotificationWillBeSent") + } + + var r0 *model.EmailNotificationContent + var r1 string + if rf, ok := ret.Get(0).(func(*model.EmailNotification) (*model.EmailNotificationContent, string)); ok { + return rf(emailNotification) + } + if rf, ok := ret.Get(0).(func(*model.EmailNotification) *model.EmailNotificationContent); ok { + r0 = rf(emailNotification) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.EmailNotificationContent) + } + } + + if rf, ok := ret.Get(1).(func(*model.EmailNotification) string); ok { + r1 = rf(emailNotification) + } else { + r1 = ret.Get(1).(string) + } + + return r0, r1 +} + +// ExecuteCommand provides a mock function with given fields: c, args +func (_m *Hooks) ExecuteCommand(c *plugin.Context, args *model.CommandArgs) (*model.CommandResponse, *model.AppError) { + ret := _m.Called(c, args) + + if len(ret) == 0 { + panic("no return value specified for ExecuteCommand") + } + + var r0 *model.CommandResponse + var r1 *model.AppError + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.CommandArgs) (*model.CommandResponse, *model.AppError)); ok { + return rf(c, args) + } + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.CommandArgs) *model.CommandResponse); ok { + r0 = rf(c, args) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.CommandResponse) + } + } + + if rf, ok := ret.Get(1).(func(*plugin.Context, *model.CommandArgs) *model.AppError); ok { + r1 = rf(c, args) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*model.AppError) + } + } + + return r0, r1 +} + +// FileWillBeUploaded provides a mock function with given fields: c, info, file, output +func (_m *Hooks) FileWillBeUploaded(c *plugin.Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) { + ret := _m.Called(c, info, file, output) + + if len(ret) == 0 { + panic("no return value specified for FileWillBeUploaded") + } + + var r0 *model.FileInfo + var r1 string + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.FileInfo, io.Reader, io.Writer) (*model.FileInfo, string)); ok { + return rf(c, info, file, output) + } + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.FileInfo, io.Reader, io.Writer) *model.FileInfo); ok { + r0 = rf(c, info, file, output) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.FileInfo) + } + } + + if rf, ok := 
ret.Get(1).(func(*plugin.Context, *model.FileInfo, io.Reader, io.Writer) string); ok { + r1 = rf(c, info, file, output) + } else { + r1 = ret.Get(1).(string) + } + + return r0, r1 +} + +// GenerateSupportData provides a mock function with given fields: c +func (_m *Hooks) GenerateSupportData(c *plugin.Context) ([]*model.FileData, error) { + ret := _m.Called(c) + + if len(ret) == 0 { + panic("no return value specified for GenerateSupportData") + } + + var r0 []*model.FileData + var r1 error + if rf, ok := ret.Get(0).(func(*plugin.Context) ([]*model.FileData, error)); ok { + return rf(c) + } + if rf, ok := ret.Get(0).(func(*plugin.Context) []*model.FileData); ok { + r0 = rf(c) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.FileData) + } + } + + if rf, ok := ret.Get(1).(func(*plugin.Context) error); ok { + r1 = rf(c) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Implemented provides a mock function with no fields +func (_m *Hooks) Implemented() ([]string, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Implemented") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func() ([]string, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []string); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MessageHasBeenDeleted provides a mock function with given fields: c, post +func (_m *Hooks) MessageHasBeenDeleted(c *plugin.Context, post *model.Post) { + _m.Called(c, post) +} + +// MessageHasBeenPosted provides a mock function with given fields: c, post +func (_m *Hooks) MessageHasBeenPosted(c *plugin.Context, post *model.Post) { + _m.Called(c, post) +} + +// MessageHasBeenUpdated provides a mock function with given fields: c, newPost, oldPost +func (_m *Hooks) MessageHasBeenUpdated(c *plugin.Context, newPost 
*model.Post, oldPost *model.Post) { + _m.Called(c, newPost, oldPost) +} + +// MessageWillBePosted provides a mock function with given fields: c, post +func (_m *Hooks) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) { + ret := _m.Called(c, post) + + if len(ret) == 0 { + panic("no return value specified for MessageWillBePosted") + } + + var r0 *model.Post + var r1 string + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.Post) (*model.Post, string)); ok { + return rf(c, post) + } + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.Post) *model.Post); ok { + r0 = rf(c, post) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Post) + } + } + + if rf, ok := ret.Get(1).(func(*plugin.Context, *model.Post) string); ok { + r1 = rf(c, post) + } else { + r1 = ret.Get(1).(string) + } + + return r0, r1 +} + +// MessageWillBeUpdated provides a mock function with given fields: c, newPost, oldPost +func (_m *Hooks) MessageWillBeUpdated(c *plugin.Context, newPost *model.Post, oldPost *model.Post) (*model.Post, string) { + ret := _m.Called(c, newPost, oldPost) + + if len(ret) == 0 { + panic("no return value specified for MessageWillBeUpdated") + } + + var r0 *model.Post + var r1 string + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.Post, *model.Post) (*model.Post, string)); ok { + return rf(c, newPost, oldPost) + } + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.Post, *model.Post) *model.Post); ok { + r0 = rf(c, newPost, oldPost) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Post) + } + } + + if rf, ok := ret.Get(1).(func(*plugin.Context, *model.Post, *model.Post) string); ok { + r1 = rf(c, newPost, oldPost) + } else { + r1 = ret.Get(1).(string) + } + + return r0, r1 +} + +// MessagesWillBeConsumed provides a mock function with given fields: posts +func (_m *Hooks) MessagesWillBeConsumed(posts []*model.Post) []*model.Post { + ret := _m.Called(posts) + + if len(ret) == 0 { + panic("no return value 
specified for MessagesWillBeConsumed") + } + + var r0 []*model.Post + if rf, ok := ret.Get(0).(func([]*model.Post) []*model.Post); ok { + r0 = rf(posts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Post) + } + } + + return r0 +} + +// NotificationWillBePushed provides a mock function with given fields: pushNotification, userID +func (_m *Hooks) NotificationWillBePushed(pushNotification *model.PushNotification, userID string) (*model.PushNotification, string) { + ret := _m.Called(pushNotification, userID) + + if len(ret) == 0 { + panic("no return value specified for NotificationWillBePushed") + } + + var r0 *model.PushNotification + var r1 string + if rf, ok := ret.Get(0).(func(*model.PushNotification, string) (*model.PushNotification, string)); ok { + return rf(pushNotification, userID) + } + if rf, ok := ret.Get(0).(func(*model.PushNotification, string) *model.PushNotification); ok { + r0 = rf(pushNotification, userID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.PushNotification) + } + } + + if rf, ok := ret.Get(1).(func(*model.PushNotification, string) string); ok { + r1 = rf(pushNotification, userID) + } else { + r1 = ret.Get(1).(string) + } + + return r0, r1 +} + +// OnActivate provides a mock function with no fields +func (_m *Hooks) OnActivate() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for OnActivate") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// OnCloudLimitsUpdated provides a mock function with given fields: limits +func (_m *Hooks) OnCloudLimitsUpdated(limits *model.ProductLimits) { + _m.Called(limits) +} + +// OnConfigurationChange provides a mock function with no fields +func (_m *Hooks) OnConfigurationChange() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for OnConfigurationChange") + } + + var r0 error + if rf, ok := ret.Get(0).(func() 
error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// OnDeactivate provides a mock function with no fields +func (_m *Hooks) OnDeactivate() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for OnDeactivate") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// OnInstall provides a mock function with given fields: c, event +func (_m *Hooks) OnInstall(c *plugin.Context, event model.OnInstallEvent) error { + ret := _m.Called(c, event) + + if len(ret) == 0 { + panic("no return value specified for OnInstall") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*plugin.Context, model.OnInstallEvent) error); ok { + r0 = rf(c, event) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// OnPluginClusterEvent provides a mock function with given fields: c, ev +func (_m *Hooks) OnPluginClusterEvent(c *plugin.Context, ev model.PluginClusterEvent) { + _m.Called(c, ev) +} + +// OnSAMLLogin provides a mock function with given fields: c, user, assertion +func (_m *Hooks) OnSAMLLogin(c *plugin.Context, user *model.User, assertion *saml2.AssertionInfo) error { + ret := _m.Called(c, user, assertion) + + if len(ret) == 0 { + panic("no return value specified for OnSAMLLogin") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.User, *saml2.AssertionInfo) error); ok { + r0 = rf(c, user, assertion) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// OnSendDailyTelemetry provides a mock function with no fields +func (_m *Hooks) OnSendDailyTelemetry() { + _m.Called() +} + +// OnSharedChannelsAttachmentSyncMsg provides a mock function with given fields: fi, post, rc +func (_m *Hooks) OnSharedChannelsAttachmentSyncMsg(fi *model.FileInfo, post *model.Post, rc *model.RemoteCluster) error { + ret := _m.Called(fi, post, rc) + + if len(ret) == 0 { + panic("no return value specified for 
OnSharedChannelsAttachmentSyncMsg") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*model.FileInfo, *model.Post, *model.RemoteCluster) error); ok { + r0 = rf(fi, post, rc) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// OnSharedChannelsPing provides a mock function with given fields: rc +func (_m *Hooks) OnSharedChannelsPing(rc *model.RemoteCluster) bool { + ret := _m.Called(rc) + + if len(ret) == 0 { + panic("no return value specified for OnSharedChannelsPing") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(*model.RemoteCluster) bool); ok { + r0 = rf(rc) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// OnSharedChannelsProfileImageSyncMsg provides a mock function with given fields: user, rc +func (_m *Hooks) OnSharedChannelsProfileImageSyncMsg(user *model.User, rc *model.RemoteCluster) error { + ret := _m.Called(user, rc) + + if len(ret) == 0 { + panic("no return value specified for OnSharedChannelsProfileImageSyncMsg") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*model.User, *model.RemoteCluster) error); ok { + r0 = rf(user, rc) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// OnSharedChannelsSyncMsg provides a mock function with given fields: msg, rc +func (_m *Hooks) OnSharedChannelsSyncMsg(msg *model.SyncMsg, rc *model.RemoteCluster) (model.SyncResponse, error) { + ret := _m.Called(msg, rc) + + if len(ret) == 0 { + panic("no return value specified for OnSharedChannelsSyncMsg") + } + + var r0 model.SyncResponse + var r1 error + if rf, ok := ret.Get(0).(func(*model.SyncMsg, *model.RemoteCluster) (model.SyncResponse, error)); ok { + return rf(msg, rc) + } + if rf, ok := ret.Get(0).(func(*model.SyncMsg, *model.RemoteCluster) model.SyncResponse); ok { + r0 = rf(msg, rc) + } else { + r0 = ret.Get(0).(model.SyncResponse) + } + + if rf, ok := ret.Get(1).(func(*model.SyncMsg, *model.RemoteCluster) error); ok { + r1 = rf(msg, rc) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// OnWebSocketConnect provides 
a mock function with given fields: webConnID, userID +func (_m *Hooks) OnWebSocketConnect(webConnID string, userID string) { + _m.Called(webConnID, userID) +} + +// OnWebSocketDisconnect provides a mock function with given fields: webConnID, userID +func (_m *Hooks) OnWebSocketDisconnect(webConnID string, userID string) { + _m.Called(webConnID, userID) +} + +// PreferencesHaveChanged provides a mock function with given fields: c, preferences +func (_m *Hooks) PreferencesHaveChanged(c *plugin.Context, preferences []model.Preference) { + _m.Called(c, preferences) +} + +// ReactionHasBeenAdded provides a mock function with given fields: c, reaction +func (_m *Hooks) ReactionHasBeenAdded(c *plugin.Context, reaction *model.Reaction) { + _m.Called(c, reaction) +} + +// ReactionHasBeenRemoved provides a mock function with given fields: c, reaction +func (_m *Hooks) ReactionHasBeenRemoved(c *plugin.Context, reaction *model.Reaction) { + _m.Called(c, reaction) +} + +// RunDataRetention provides a mock function with given fields: nowTime, batchSize +func (_m *Hooks) RunDataRetention(nowTime int64, batchSize int64) (int64, error) { + ret := _m.Called(nowTime, batchSize) + + if len(ret) == 0 { + panic("no return value specified for RunDataRetention") + } + + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(int64, int64) (int64, error)); ok { + return rf(nowTime, batchSize) + } + if rf, ok := ret.Get(0).(func(int64, int64) int64); ok { + r0 = rf(nowTime, batchSize) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(int64, int64) error); ok { + r1 = rf(nowTime, batchSize) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ServeHTTP provides a mock function with given fields: c, w, r +func (_m *Hooks) ServeHTTP(c *plugin.Context, w http.ResponseWriter, r *http.Request) { + _m.Called(c, w, r) +} + +// ServeMetrics provides a mock function with given fields: c, w, r +func (_m *Hooks) ServeMetrics(c *plugin.Context, w 
http.ResponseWriter, r *http.Request) { + _m.Called(c, w, r) +} + +// UserHasBeenCreated provides a mock function with given fields: c, user +func (_m *Hooks) UserHasBeenCreated(c *plugin.Context, user *model.User) { + _m.Called(c, user) +} + +// UserHasBeenDeactivated provides a mock function with given fields: c, user +func (_m *Hooks) UserHasBeenDeactivated(c *plugin.Context, user *model.User) { + _m.Called(c, user) +} + +// UserHasJoinedChannel provides a mock function with given fields: c, channelMember, actor +func (_m *Hooks) UserHasJoinedChannel(c *plugin.Context, channelMember *model.ChannelMember, actor *model.User) { + _m.Called(c, channelMember, actor) +} + +// UserHasJoinedTeam provides a mock function with given fields: c, teamMember, actor +func (_m *Hooks) UserHasJoinedTeam(c *plugin.Context, teamMember *model.TeamMember, actor *model.User) { + _m.Called(c, teamMember, actor) +} + +// UserHasLeftChannel provides a mock function with given fields: c, channelMember, actor +func (_m *Hooks) UserHasLeftChannel(c *plugin.Context, channelMember *model.ChannelMember, actor *model.User) { + _m.Called(c, channelMember, actor) +} + +// UserHasLeftTeam provides a mock function with given fields: c, teamMember, actor +func (_m *Hooks) UserHasLeftTeam(c *plugin.Context, teamMember *model.TeamMember, actor *model.User) { + _m.Called(c, teamMember, actor) +} + +// UserHasLoggedIn provides a mock function with given fields: c, user +func (_m *Hooks) UserHasLoggedIn(c *plugin.Context, user *model.User) { + _m.Called(c, user) +} + +// UserWillLogIn provides a mock function with given fields: c, user +func (_m *Hooks) UserWillLogIn(c *plugin.Context, user *model.User) string { + ret := _m.Called(c, user) + + if len(ret) == 0 { + panic("no return value specified for UserWillLogIn") + } + + var r0 string + if rf, ok := ret.Get(0).(func(*plugin.Context, *model.User) string); ok { + r0 = rf(c, user) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// 
WebSocketMessageHasBeenPosted provides a mock function with given fields: webConnID, userID, req +func (_m *Hooks) WebSocketMessageHasBeenPosted(webConnID string, userID string, req *model.WebSocketRequest) { + _m.Called(webConnID, userID, req) +} + +// NewHooks creates a new instance of Hooks. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewHooks(t interface { + mock.TestingT + Cleanup(func()) +}) *Hooks { + mock := &Hooks{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/mock/mock.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/mock/mock.go new file mode 100644 index 00000000..8e6c5cf3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/plugintest/mock/mock.go @@ -0,0 +1,33 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// This package provides aliases for the contents of "github.com/stretchr/testify/mock". Because +// external packages can't import our vendored dependencies, this is necessary for them to be able +// to fully utilize the plugintest package. +package mock + +import ( + "github.com/stretchr/testify/mock" +) + +const ( + Anything = mock.Anything +) + +type Arguments = mock.Arguments +type AnythingOfTypeArgument = mock.AnythingOfTypeArgument +type Call = mock.Call +type Mock = mock.Mock +type TestingT = mock.TestingT + +func AnythingOfType(t string) AnythingOfTypeArgument { + return mock.AnythingOfType(t) +} + +func AssertExpectationsForObjects(t TestingT, testObjects ...any) bool { + return mock.AssertExpectationsForObjects(t, testObjects...) 
+} + +func MatchedBy(fn any) any { + return mock.MatchedBy(fn) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/stringifier.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/stringifier.go new file mode 100644 index 00000000..4d0bb7a1 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/stringifier.go @@ -0,0 +1,31 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package plugin + +import ( + "fmt" +) + +func stringify(objects []any) []string { + stringified := make([]string, len(objects)) + for i, object := range objects { + stringified[i] = fmt.Sprintf("%+v", object) + } + return stringified +} + +func toObjects(strings []string) []any { + if strings == nil { + return nil + } + objects := make([]any, len(strings)) + for i, string := range strings { + objects[i] = string + } + return objects +} + +func stringifyToObjects(objects []any) []any { + return toObjects(stringify(objects)) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/supervisor.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/supervisor.go new file mode 100644 index 00000000..48ada556 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/supervisor.go @@ -0,0 +1,253 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package plugin + +import ( + "crypto/sha256" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "runtime" + "strings" + "sync" + "time" + + plugin "github.com/hashicorp/go-plugin" + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +type supervisor struct { + lock sync.RWMutex + pluginID string + appDriver AppDriver + client *plugin.Client + hooks Hooks + implemented [TotalHooksID]bool + hooksClient *hooksRPCClient + isReattached bool +} + +type driverForPlugin struct { + AppDriver + pluginID string +} + +func (d *driverForPlugin) Conn(isMaster bool) (string, error) { + return d.AppDriver.ConnWithPluginID(isMaster, d.pluginID) +} + +func WithExecutableFromManifest(pluginInfo *model.BundleInfo) func(*supervisor, *plugin.ClientConfig) error { + return func(_ *supervisor, clientConfig *plugin.ClientConfig) error { + executable := pluginInfo.Manifest.GetExecutableForRuntime(runtime.GOOS, runtime.GOARCH) + if executable == "" { + return fmt.Errorf("backend executable not found for environment: %s/%s", runtime.GOOS, runtime.GOARCH) + } + + executable = filepath.Clean(filepath.Join(".", executable)) + if strings.HasPrefix(executable, "..") { + return fmt.Errorf("invalid backend executable: %s", executable) + } + + executable = filepath.Join(pluginInfo.Path, executable) + + cmd := exec.Command(executable) + + // This doesn't add more security than before + // but removes the SecureConfig is nil warning. 
+ // https://mattermost.atlassian.net/browse/MM-49167 + pluginChecksum, err := getPluginExecutableChecksum(executable) + if err != nil { + return errors.Wrapf(err, "unable to generate plugin checksum") + } + + clientConfig.Cmd = cmd + clientConfig.SecureConfig = &plugin.SecureConfig{ + Checksum: pluginChecksum, + Hash: sha256.New(), + } + + return nil + } +} + +func WithReattachConfig(pluginReattachConfig *model.PluginReattachConfig) func(*supervisor, *plugin.ClientConfig) error { + return func(sup *supervisor, clientConfig *plugin.ClientConfig) error { + clientConfig.Reattach = pluginReattachConfig.ToHashicorpPluginReattachmentConfig() + sup.isReattached = true + + return nil + } +} + +func newSupervisor(pluginInfo *model.BundleInfo, apiImpl API, driver AppDriver, parentLogger *mlog.Logger, metrics metricsInterface, opts ...func(*supervisor, *plugin.ClientConfig) error) (retSupervisor *supervisor, retErr error) { + sup := supervisor{ + pluginID: pluginInfo.Manifest.Id, + } + if driver != nil { + sup.appDriver = &driverForPlugin{AppDriver: driver, pluginID: pluginInfo.Manifest.Id} + } + + defer func() { + if retErr != nil { + sup.Shutdown() + } + }() + + wrappedLogger := pluginInfo.WrapLogger(parentLogger) + + hclogAdaptedLogger := &hclogAdapter{ + wrappedLogger: wrappedLogger, + extrasKey: "wrapped_extras", + } + + pluginMap := map[string]plugin.Plugin{ + "hooks": &hooksPlugin{ + log: wrappedLogger, + driverImpl: sup.appDriver, + apiImpl: &apiTimerLayer{pluginInfo.Manifest.Id, apiImpl, metrics}, + }, + } + + clientConfig := &plugin.ClientConfig{ + HandshakeConfig: handshake, + Plugins: pluginMap, + SyncStdout: wrappedLogger.With(mlog.String("source", "plugin_stdout")).StdLogWriter(), + SyncStderr: wrappedLogger.With(mlog.String("source", "plugin_stderr")).StdLogWriter(), + Logger: hclogAdaptedLogger, + StartTimeout: time.Second * 3, + } + for _, opt := range opts { + err := opt(&sup, clientConfig) + if err != nil { + return nil, errors.Wrap(err, "failed to apply 
option") + } + } + + sup.client = plugin.NewClient(clientConfig) + + rpcClient, err := sup.client.Client() + if err != nil { + return nil, err + } + + raw, err := rpcClient.Dispense("hooks") + if err != nil { + return nil, err + } + + c, ok := raw.(*hooksRPCClient) + if ok { + sup.hooksClient = c + } + + sup.hooks = &hooksTimerLayer{pluginInfo.Manifest.Id, raw.(Hooks), metrics} + + impl, err := sup.hooks.Implemented() + if err != nil { + return nil, err + } + for _, hookName := range impl { + if hookId, ok := hookNameToId[hookName]; ok { + sup.implemented[hookId] = true + } + } + + return &sup, nil +} + +func (sup *supervisor) Shutdown() { + sup.lock.RLock() + defer sup.lock.RUnlock() + if sup.client != nil { + // For reattached plugins, Kill() is mostly a no-op, so manually clean up the + // underlying rpcClient. This might be something to upstream unless we're doing + // something else wrong. + if sup.isReattached { + rpcClient, err := sup.client.Client() + if err != nil { + mlog.Warn("Failed to obtain rpcClient on Shutdown") + } else { + if err = rpcClient.Close(); err != nil { + mlog.Warn("Failed to close rpcClient on Shutdown") + } + } + } + + sup.client.Kill() + } + + // Wait for API RPC server and DB RPC server to exit. + // And then shutdown conns. + if sup.hooksClient != nil { + sup.hooksClient.doneWg.Wait() + if sup.appDriver != nil { + sup.appDriver.ShutdownConns(sup.pluginID) + } + } +} + +func (sup *supervisor) Hooks() Hooks { + sup.lock.RLock() + defer sup.lock.RUnlock() + return sup.hooks +} + +// PerformHealthCheck checks the plugin through an an RPC ping. +func (sup *supervisor) PerformHealthCheck() error { + // No need for a lock here because Ping is read-locked. 
+ if pingErr := sup.Ping(); pingErr != nil { + for pingFails := 1; pingFails < HealthCheckPingFailLimit; pingFails++ { + pingErr = sup.Ping() + if pingErr == nil { + break + } + } + if pingErr != nil { + return fmt.Errorf("plugin RPC connection is not responding") + } + } + + return nil +} + +// Ping checks that the RPC connection with the plugin is alive and healthy. +func (sup *supervisor) Ping() error { + sup.lock.RLock() + defer sup.lock.RUnlock() + client, err := sup.client.Client() + if err != nil { + return err + } + + return client.Ping() +} + +func (sup *supervisor) Implements(hookId int) bool { + sup.lock.RLock() + defer sup.lock.RUnlock() + return sup.implemented[hookId] +} + +func getPluginExecutableChecksum(executablePath string) ([]byte, error) { + pathHash := sha256.New() + file, err := os.Open(executablePath) + + if err != nil { + return nil, err + } + + defer file.Close() + + _, err = io.Copy(pathHash, file) + if err != nil { + return nil, err + } + + return pathHash.Sum(nil), nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/plugin/utils/test_files_compiler.go b/vendor/github.com/mattermost/mattermost/server/public/plugin/utils/test_files_compiler.go new file mode 100644 index 00000000..626a0a4c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/plugin/utils/test_files_compiler.go @@ -0,0 +1,85 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package utils + +import ( + "bytes" + "os" + "os/exec" + "path/filepath" + "runtime" + "testing" + + "github.com/stretchr/testify/require" +) + +func CompileGo(t *testing.T, sourceCode, outputPath string) { + compileGo(t, "go", sourceCode, outputPath) +} + +func CompileGoVersion(t *testing.T, goVersion, sourceCode, outputPath string) { + var goBin string + if goVersion != "" { + goBin = os.Getenv("GOBIN") + } + compileGo(t, filepath.Join(goBin, "go"+goVersion), sourceCode, outputPath) +} + +func compileGo(t *testing.T, goBin, sourceCode, outputPath string) { + dir, err := os.MkdirTemp(".", "") + require.NoError(t, err) + defer os.RemoveAll(dir) + + dir, err = filepath.Abs(dir) + require.NoError(t, err) + + // Write out main.go given the source code. + main := filepath.Join(dir, "main.go") + err = os.WriteFile(main, []byte(sourceCode), 0600) + require.NoError(t, err) + + _, sourceFile, _, ok := runtime.Caller(0) + require.True(t, ok) + serverPath := filepath.Dir(filepath.Dir(sourceFile)) + + out := &bytes.Buffer{} + cmd := exec.Command(goBin, "build", "-o", outputPath, main) + cmd.Dir = serverPath + cmd.Stdout = out + cmd.Stderr = out + err = cmd.Run() + if err != nil { + t.Log("Go compile errors:\n", out.String()) + } + require.NoError(t, err, "failed to compile go") +} + +func CompileGoTest(t *testing.T, sourceCode, outputPath string) { + dir, err := os.MkdirTemp(".", "") + require.NoError(t, err) + defer os.RemoveAll(dir) + + dir, err = filepath.Abs(dir) + require.NoError(t, err) + + // Write out main.go given the source code. 
+ main := filepath.Join(dir, "main_test.go") + err = os.WriteFile(main, []byte(sourceCode), 0600) + require.NoError(t, err) + + _, sourceFile, _, ok := runtime.Caller(0) + require.True(t, ok) + serverPath := filepath.Dir(filepath.Dir(sourceFile)) + + out := &bytes.Buffer{} + cmd := exec.Command("go", "test", "-c", "-o", outputPath, main) + cmd.Dir = serverPath + cmd.Stdout = out + cmd.Stderr = out + err = cmd.Run() + if err != nil { + t.Log("Go compile errors:\n", out.String()) + } + require.NoError(t, err, "failed to compile go") +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/bot.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/bot.go new file mode 100644 index 00000000..ab0a3f0e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/bot.go @@ -0,0 +1,220 @@ +package pluginapi + +import ( + "os" + "path/filepath" + + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" + "github.com/mattermost/mattermost/server/public/pluginapi/cluster" +) + +const ( + internalKeyPrefix = "mmi_" + botUserKey = internalKeyPrefix + "botid" + botEnsureMutexKey = internalKeyPrefix + "bot_ensure" +) + +// BotService exposes methods to manipulate bots. +type BotService struct { + api plugin.API +} + +// Get returns a bot by botUserID. +// +// Minimum server version: 5.10 +func (b *BotService) Get(botUserID string, includeDeleted bool) (*model.Bot, error) { + bot, appErr := b.api.GetBot(botUserID, includeDeleted) + + return bot, normalizeAppErr(appErr) +} + +// BotListOption is an option to configure a bot List() request. +type BotListOption func(*model.BotGetOptions) + +// BotOwner option configures bot list request to only retrieve the bots that matches with +// owner's id. 
+func BotOwner(id string) BotListOption { + return func(o *model.BotGetOptions) { + o.OwnerId = id + } +} + +// BotIncludeDeleted option configures bot list request to also retrieve the deleted bots. +func BotIncludeDeleted() BotListOption { + return func(o *model.BotGetOptions) { + o.IncludeDeleted = true + } +} + +// BotOnlyOrphans option configures bot list request to only retrieve orphan bots. +func BotOnlyOrphans() BotListOption { + return func(o *model.BotGetOptions) { + o.OnlyOrphaned = true + } +} + +// List returns a list of bots by page, count and options. +// +// Minimum server version: 5.10 +func (b *BotService) List(page, perPage int, options ...BotListOption) ([]*model.Bot, error) { + opts := &model.BotGetOptions{ + Page: page, + PerPage: perPage, + } + for _, o := range options { + o(opts) + } + bots, appErr := b.api.GetBots(opts) + + return bots, normalizeAppErr(appErr) +} + +// Create creates the bot and corresponding user. +// +// Minimum server version: 5.10 +func (b *BotService) Create(bot *model.Bot) error { + createdBot, appErr := b.api.CreateBot(bot) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *bot = *createdBot + + return nil +} + +// Patch applies the given patch to the bot and corresponding user. +// +// Minimum server version: 5.10 +func (b *BotService) Patch(botUserID string, botPatch *model.BotPatch) (*model.Bot, error) { + bot, appErr := b.api.PatchBot(botUserID, botPatch) + + return bot, normalizeAppErr(appErr) +} + +// UpdateActive marks a bot as active or inactive, along with its corresponding user. +// +// Minimum server version: 5.10 +func (b *BotService) UpdateActive(botUserID string, isActive bool) (*model.Bot, error) { + bot, appErr := b.api.UpdateBotActive(botUserID, isActive) + + return bot, normalizeAppErr(appErr) +} + +// DeletePermanently permanently deletes a bot and its corresponding user. 
+// +// Minimum server version: 5.10 +func (b *BotService) DeletePermanently(botUserID string) error { + return normalizeAppErr(b.api.PermanentDeleteBot(botUserID)) +} + +type ensureBotOptions struct { + ProfileImagePath string + ProfileImageBytes []byte +} + +type EnsureBotOption func(*ensureBotOptions) + +// ProfileImagePath configures EnsureBot to set a profile image from the given path. +// +// Using this option overrides any previously set ProfileImageBytes option. +func ProfileImagePath(path string) EnsureBotOption { + return func(args *ensureBotOptions) { + args.ProfileImagePath = path + args.ProfileImageBytes = nil + } +} + +// ProfileImageBytes configures EnsureBot to set a profile image from the given bytes. +// +// Using this option overrides any previously set ProfileImagePath option. +func ProfileImageBytes(bytes []byte) EnsureBotOption { + return func(args *ensureBotOptions) { + args.ProfileImageBytes = bytes + args.ProfileImagePath = "" + } +} + +// EnsureBot either returns an existing bot user matching the given bot, or creates a bot user from the given bot. +// A profile image or icon image may be optionally passed in to be set for the existing or newly created bot. +// Returns the id of the resulting bot. +// EnsureBot can safely be called multiple instances of a plugin concurrently. +// +// Minimum server version: 5.10 +func (b *BotService) EnsureBot(bot *model.Bot, options ...EnsureBotOption) (string, error) { + m, err := cluster.NewMutex(b.api, botEnsureMutexKey) + if err != nil { + return "", errors.Wrap(err, "failed to create mutex") + } + + return b.ensureBot(m, bot, options...) 
+} + +type mutex interface { + Lock() + Unlock() +} + +func (b *BotService) ensureBot(m mutex, bot *model.Bot, options ...EnsureBotOption) (string, error) { + err := ensureServerVersion(b.api, "5.10.0") + if err != nil { + return "", errors.Wrap(err, "failed to ensure bot") + } + + // Default options + o := &ensureBotOptions{ + ProfileImagePath: "", + } + + for _, setter := range options { + setter(o) + } + + botID, err := b.ensureBotUser(m, bot) + if err != nil { + return "", err + } + + if o.ProfileImagePath != "" { + imageBytes, err := b.readFile(o.ProfileImagePath) + if err != nil { + return "", errors.Wrap(err, "failed to read profile image") + } + appErr := b.api.SetProfileImage(botID, imageBytes) + if appErr != nil { + return "", errors.Wrap(appErr, "failed to set profile image") + } + } else if len(o.ProfileImageBytes) > 0 { + appErr := b.api.SetProfileImage(botID, o.ProfileImageBytes) + if appErr != nil { + return "", errors.Wrap(appErr, "failed to set profile image") + } + } + + return botID, nil +} + +func (b *BotService) ensureBotUser(m mutex, bot *model.Bot) (retBotID string, retErr error) { + // Lock to prevent two plugins from racing to create the bot account + m.Lock() + defer m.Unlock() + + return b.api.EnsureBotUser(bot) +} + +func (b *BotService) readFile(path string) ([]byte, error) { + bundlePath, err := b.api.GetBundlePath() + if err != nil { + return nil, errors.Wrap(err, "failed to get bundle path") + } + + imageBytes, err := os.ReadFile(filepath.Join(bundlePath, path)) + if err != nil { + return nil, errors.Wrap(err, "failed to read image") + } + + return imageBytes, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/channel.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/channel.go new file mode 100644 index 00000000..405e22f2 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/channel.go @@ -0,0 +1,286 @@ +package pluginapi + +import ( + "net/http" + 
"time" + + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// ChannelService exposes methods to manipulate channels. +type ChannelService struct { + api plugin.API +} + +// Get gets a channel. +// +// Minimum server version: 5.2 +func (c *ChannelService) Get(channelID string) (*model.Channel, error) { + channel, appErr := c.api.GetChannel(channelID) + + return channel, normalizeAppErr(appErr) +} + +// GetByName gets a channel by its name, given a team id. +// +// Minimum server version: 5.2 +func (c *ChannelService) GetByName(teamID, channelName string, includeDeleted bool) (*model.Channel, error) { + channel, appErr := c.api.GetChannelByName(teamID, channelName, includeDeleted) + + return channel, normalizeAppErr(appErr) +} + +// GetDirect gets a direct message channel. +// +// Note that if the channel does not exist it will create it. +// +// Minimum server version: 5.2 +func (c *ChannelService) GetDirect(userID1, userID2 string) (*model.Channel, error) { + channel, appErr := c.api.GetDirectChannel(userID1, userID2) + + return channel, normalizeAppErr(appErr) +} + +// GetGroup gets a group message channel. +// +// Note that if the channel does not exist it will create it. +// +// Minimum server version: 5.2 +func (c *ChannelService) GetGroup(userIDs []string) (*model.Channel, error) { + channel, appErr := c.api.GetGroupChannel(userIDs) + + return channel, normalizeAppErr(appErr) +} + +// GetByNameForTeamName gets a channel by its name, given a team name. +// +// Minimum server version: 5.2 +func (c *ChannelService) GetByNameForTeamName(teamName, channelName string, includeDeleted bool) (*model.Channel, error) { + channel, appErr := c.api.GetChannelByNameForTeamName(teamName, channelName, includeDeleted) + + return channel, normalizeAppErr(appErr) +} + +// ListForTeamForUser gets a list of channels for given user ID in given team ID. 
+// +// Minimum server version: 5.6 +func (c *ChannelService) ListForTeamForUser(teamID, userID string, includeDeleted bool) ([]*model.Channel, error) { + channels, appErr := c.api.GetChannelsForTeamForUser(teamID, userID, includeDeleted) + + return channels, normalizeAppErr(appErr) +} + +// ListPublicChannelsForTeam gets a list of all channels. +// +// Minimum server version: 5.2 +func (c *ChannelService) ListPublicChannelsForTeam(teamID string, page, perPage int) ([]*model.Channel, error) { + channels, appErr := c.api.GetPublicChannelsForTeam(teamID, page, perPage) + + return channels, normalizeAppErr(appErr) +} + +// Search returns the channels on a team matching the provided search term. +// +// Minimum server version: 5.6 +func (c *ChannelService) Search(teamID, term string) ([]*model.Channel, error) { + channels, appErr := c.api.SearchChannels(teamID, term) + + return channels, normalizeAppErr(appErr) +} + +// Create creates a channel. +// +// Minimum server version: 5.2 +func (c *ChannelService) Create(channel *model.Channel) error { + createdChannel, appErr := c.api.CreateChannel(channel) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *channel = *createdChannel + + return c.waitForChannelCreation(channel.Id) +} + +// Update updates a channel. +// +// Minimum server version: 5.2 +func (c *ChannelService) Update(channel *model.Channel) error { + updatedChannel, appErr := c.api.UpdateChannel(channel) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *channel = *updatedChannel + + return nil +} + +// Delete deletes a channel. +// +// Minimum server version: 5.2 +func (c *ChannelService) Delete(channelID string) error { + return normalizeAppErr(c.api.DeleteChannel(channelID)) +} + +// GetChannelStats gets statistics for a channel. 
+// +// Minimum server version: 5.6 +func (c *ChannelService) GetChannelStats(channelID string) (*model.ChannelStats, error) { + channelStats, appErr := c.api.GetChannelStats(channelID) + + return channelStats, normalizeAppErr(appErr) +} + +// GetMember gets a channel membership for a user. +// +// Minimum server version: 5.2 +func (c *ChannelService) GetMember(channelID, userID string) (*model.ChannelMember, error) { + channelMember, appErr := c.api.GetChannelMember(channelID, userID) + + return channelMember, normalizeAppErr(appErr) +} + +// ListMembers gets a channel membership for all users. +// +// Minimum server version: 5.6 +func (c *ChannelService) ListMembers(channelID string, page, perPage int) ([]*model.ChannelMember, error) { + channelMembers, appErr := c.api.GetChannelMembers(channelID, page, perPage) + + return channelMembersToChannelMemberSlice(channelMembers), normalizeAppErr(appErr) +} + +// ListMembersByIDs gets a channel membership for a particular User +// +// Minimum server version: 5.6 +func (c *ChannelService) ListMembersByIDs(channelID string, userIDs []string) ([]*model.ChannelMember, error) { + channelMembers, appErr := c.api.GetChannelMembersByIds(channelID, userIDs) + + return channelMembersToChannelMemberSlice(channelMembers), normalizeAppErr(appErr) +} + +// ListMembersForUser returns all channel memberships on a team for a user. +// +// Minimum server version: 5.10 +func (c *ChannelService) ListMembersForUser(teamID, userID string, page, perPage int) ([]*model.ChannelMember, error) { + channelMembers, appErr := c.api.GetChannelMembersForUser(teamID, userID, page, perPage) + + return channelMembers, normalizeAppErr(appErr) +} + +// AddMember joins a user to a channel (as if they joined themselves). +// This means the user will not receive notifications for joining the channel. 
+// +// Minimum server version: 5.2 +func (c *ChannelService) AddMember(channelID, userID string) (*model.ChannelMember, error) { + channelMember, appErr := c.api.AddChannelMember(channelID, userID) + + return channelMember, normalizeAppErr(appErr) +} + +// AddUser adds a user to a channel as if the specified user had invited them. +// This means the user will receive the regular notifications for being added to the channel. +// +// Minimum server version: 5.18 +func (c *ChannelService) AddUser(channelID, userID, asUserID string) (*model.ChannelMember, error) { + channelMember, appErr := c.api.AddUserToChannel(channelID, userID, asUserID) + + return channelMember, normalizeAppErr(appErr) +} + +// DeleteMember deletes a channel membership for a user. +// +// Minimum server version: 5.2 +func (c *ChannelService) DeleteMember(channelID, userID string) error { + appErr := c.api.DeleteChannelMember(channelID, userID) + + return normalizeAppErr(appErr) +} + +// UpdateChannelMemberRoles updates a user's roles for a channel. +// +// Minimum server version: 5.2 +func (c *ChannelService) UpdateChannelMemberRoles(channelID, userID, newRoles string) (*model.ChannelMember, error) { + channelMember, appErr := c.api.UpdateChannelMemberRoles(channelID, userID, newRoles) + + return channelMember, normalizeAppErr(appErr) +} + +// UpdateChannelMemberNotifications updates a user's notification properties for a channel. +// +// Minimum server version: 5.2 +func (c *ChannelService) UpdateChannelMemberNotifications(channelID, userID string, notifications map[string]string) (*model.ChannelMember, error) { + channelMember, appErr := c.api.UpdateChannelMemberNotifications(channelID, userID, notifications) + + return channelMember, normalizeAppErr(appErr) +} + +// CreateSidebarCategory creates a new sidebar category for a set of channels. 
+// +// Minimum server version: 5.38 +func (c *ChannelService) CreateSidebarCategory( + userID, teamID string, newCategory *model.SidebarCategoryWithChannels) error { + category, appErr := c.api.CreateChannelSidebarCategory(userID, teamID, newCategory) + if appErr != nil { + return normalizeAppErr(appErr) + } + *newCategory = *category + + return nil +} + +// GetSidebarCategories returns sidebar categories. +// +// Minimum server version: 5.38 +func (c *ChannelService) GetSidebarCategories(userID, teamID string) (*model.OrderedSidebarCategories, error) { + categories, appErr := c.api.GetChannelSidebarCategories(userID, teamID) + + return categories, normalizeAppErr(appErr) +} + +// UpdateSidebarCategories updates the channel sidebar categories. +// +// Minimum server version: 5.38 +func (c *ChannelService) UpdateSidebarCategories( + userID, teamID string, categories []*model.SidebarCategoryWithChannels) error { + updatedCategories, appErr := c.api.UpdateChannelSidebarCategories(userID, teamID, categories) + if appErr != nil { + return normalizeAppErr(appErr) + } + copy(categories, updatedCategories) + + return nil +} + +func (c *ChannelService) waitForChannelCreation(channelID string) error { + if len(c.api.GetConfig().SqlSettings.DataSourceReplicas) == 0 { + return nil + } + + now := time.Now() + + for time.Since(now) < 1500*time.Millisecond { + time.Sleep(100 * time.Millisecond) + + if _, err := c.api.GetChannel(channelID); err == nil { + // Channel found + return nil + } else if err.StatusCode != http.StatusNotFound { + return err + } + } + + return errors.Errorf("giving up waiting for channel creation, channelID=%s", channelID) +} + +func channelMembersToChannelMemberSlice(cm model.ChannelMembers) []*model.ChannelMember { + cmp := make([]*model.ChannelMember, len(cm)) + for i := range cm { + cmp[i] = &(cm)[i] + } + + return cmp +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/client.go 
b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/client.go new file mode 100644 index 00000000..1255c690 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/client.go @@ -0,0 +1,81 @@ +package pluginapi + +import ( + "github.com/blang/semver/v4" + "github.com/mattermost/mattermost/server/public/plugin" + "github.com/pkg/errors" +) + +// Client is a streamlined wrapper over the mattermost plugin API. +type Client struct { + api plugin.API + + Bot BotService + Channel ChannelService + Cluster ClusterService + Configuration ConfigurationService + SlashCommand SlashCommandService + OAuth OAuthService + Emoji EmojiService + File FileService + Frontend FrontendService + Group GroupService + KV KVService + Log LogService + Mail MailService + Plugin PluginService + Post PostService + Property PropertyService + Session SessionService + Store *StoreService + System SystemService + Team TeamService + User UserService +} + +// NewClient creates a new instance of Client. +// +// This client must only be created once per plugin to +// prevent reacquiring of resources. 
+func NewClient(api plugin.API, driver plugin.Driver) *Client { + return &Client{ + api: api, + + Bot: BotService{api: api}, + Channel: ChannelService{api: api}, + Cluster: ClusterService{api: api}, + Configuration: ConfigurationService{api: api}, + SlashCommand: SlashCommandService{api: api}, + OAuth: OAuthService{api: api}, + Emoji: EmojiService{api: api}, + File: FileService{api: api}, + Frontend: FrontendService{api: api}, + Group: GroupService{api: api}, + KV: KVService{api: api}, + Log: LogService{api: api}, + Mail: MailService{api: api}, + Plugin: PluginService{api: api}, + Post: PostService{api: api}, + Property: PropertyService{api: api}, + Session: SessionService{api: api}, + Store: &StoreService{ + api: api, + driver: driver, + }, + System: SystemService{api: api}, + Team: TeamService{api: api}, + User: UserService{api: api}, + } +} + +func ensureServerVersion(api plugin.API, required string) error { + serverVersion := api.GetServerVersion() + currentVersion := semver.MustParse(serverVersion) + requiredVersion := semver.MustParse(required) + + if currentVersion.LT(requiredVersion) { + return errors.Errorf("incompatible server version for plugin, minimum required version: %s, current version: %s", required, serverVersion) + } + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster.go new file mode 100644 index 00000000..23af8d7a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster.go @@ -0,0 +1,23 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// ClusterService exposes methods to interact with cluster nodes. 
+type ClusterService struct {
+	api plugin.API
+}
+
+// PublishPluginEvent broadcasts a plugin event to all other running instances of
+// the calling plugin that are present in the cluster.
+//
+// This method is used to allow plugin communication in a High-Availability cluster.
+// The receiving side should implement the OnPluginClusterEvent hook
+// to receive events sent through this method.
+//
+// Minimum server version: 5.36
+func (c *ClusterService) PublishPluginEvent(ev model.PluginClusterEvent, opts model.PluginClusterEventSendOptions) error {
+	return c.api.PublishPluginClusterEvent(ev, opts)
+}
diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/doc.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/doc.go
new file mode 100644
index 00000000..2024d02b
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/doc.go
@@ -0,0 +1,3 @@
+// Package cluster exposes synchronization primitives to ensure correct behavior across multiple
+// plugin instances in a Mattermost cluster.
+package cluster
diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job.go
new file mode 100644
index 00000000..b4e43b29
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job.go
@@ -0,0 +1,229 @@
+package cluster
+
+import (
+	"encoding/json"
+	"sync"
+	"time"
+
+	"github.com/pkg/errors"
+
+	"github.com/mattermost/mattermost/server/public/model"
+)
+
+const (
+	// cronPrefix is used to namespace key values created for a job from other key values
+	// created by a plugin.
+	cronPrefix = "cron_"
+)
+
+// JobPluginAPI is the plugin API interface required to schedule jobs.
+type JobPluginAPI interface { + MutexPluginAPI + KVGet(key string) ([]byte, *model.AppError) + KVDelete(key string) *model.AppError + KVList(page, count int) ([]string, *model.AppError) +} + +// JobConfig defines the configuration of a scheduled job. +type JobConfig struct { + // Interval is the period of execution for the job. + Interval time.Duration +} + +// NextWaitInterval is a callback computing the next wait interval for a job. +type NextWaitInterval func(now time.Time, metadata JobMetadata) time.Duration + +// MakeWaitForInterval creates a function to scheduling a job to run on the given interval relative +// to the last finished timestamp. +// +// For example, if the job first starts at 12:01 PM, and is configured with interval 5 minutes, +// it will next run at: +// +// 12:06, 12:11, 12:16, ... +// +// If the job has not previously started, it will run immediately. +func MakeWaitForInterval(interval time.Duration) NextWaitInterval { + if interval == 0 { + panic("must specify non-zero ready interval") + } + + return func(now time.Time, metadata JobMetadata) time.Duration { + sinceLastFinished := now.Sub(metadata.LastFinished) + if sinceLastFinished < interval { + return interval - sinceLastFinished + } + + return 0 + } +} + +// MakeWaitForRoundedInterval creates a function, scheduling a job to run on the nearest rounded +// interval relative to the last finished timestamp. +// +// For example, if the job first starts at 12:04 PM, and is configured with interval 5 minutes, +// and is configured to round to 5 minute intervals, it will next run at: +// +// 12:05 PM, 12:10 PM, 12:15 PM, ... +// +// If the job has not previously started, it will run immediately. Note that this wait interval +// strategy does not guarantee a minimum interval between runs, only that subsequent runs will be +// scheduled on the rounded interval. 
+func MakeWaitForRoundedInterval(interval time.Duration) NextWaitInterval { + if interval == 0 { + panic("must specify non-zero ready interval") + } + + return func(now time.Time, metadata JobMetadata) time.Duration { + if metadata.LastFinished.IsZero() { + return 0 + } + + target := metadata.LastFinished.Add(interval).Truncate(interval) + untilTarget := target.Sub(now) + if untilTarget > 0 { + return untilTarget + } + + return 0 + } +} + +// Job is a scheduled job whose callback function is executed on a configured interval by at most +// one plugin instance at a time. +// +// Use scheduled jobs to perform background activity on a regular interval without having to +// explicitly coordinate with other instances of the same plugin that might repeat that effort. +type Job struct { + pluginAPI JobPluginAPI + key string + mutex *Mutex + nextWaitInterval NextWaitInterval + callback func() + + stopOnce sync.Once + stop chan bool + done chan bool +} + +// JobMetadata persists metadata about job execution. +type JobMetadata struct { + // LastFinished is the last time the job finished anywhere in the cluster. + LastFinished time.Time +} + +// Schedule creates a scheduled job. +func Schedule(pluginAPI JobPluginAPI, key string, nextWaitInterval NextWaitInterval, callback func()) (*Job, error) { + key = cronPrefix + key + + mutex, err := NewMutex(pluginAPI, key) + if err != nil { + return nil, errors.Wrap(err, "failed to create job mutex") + } + + job := &Job{ + pluginAPI: pluginAPI, + key: key, + mutex: mutex, + nextWaitInterval: nextWaitInterval, + callback: callback, + stop: make(chan bool), + done: make(chan bool), + } + + go job.run() + + return job, nil +} + +// readMetadata reads the job execution metadata from the kv store. 
+func (j *Job) readMetadata() (JobMetadata, error) {
+	data, appErr := j.pluginAPI.KVGet(j.key)
+	if appErr != nil {
+		return JobMetadata{}, errors.Wrap(appErr, "failed to read data")
+	}
+
+	if data == nil {
+		return JobMetadata{}, nil
+	}
+
+	var metadata JobMetadata
+	err := json.Unmarshal(data, &metadata)
+	if err != nil {
+		return JobMetadata{}, errors.Wrap(err, "failed to decode data")
+	}
+
+	return metadata, nil
+}
+
+// saveMetadata writes updated job execution metadata to the kv store.
+//
+// It is assumed that the job mutex is held, negating the need to require an atomic write.
+func (j *Job) saveMetadata(metadata JobMetadata) error {
+	data, err := json.Marshal(metadata)
+	if err != nil {
+		return errors.Wrap(err, "failed to marshal data")
+	}
+
+	ok, appErr := j.pluginAPI.KVSetWithOptions(j.key, data, model.PluginKVSetOptions{})
+	if appErr != nil || !ok {
+		return errors.Wrap(appErr, "failed to set data")
+	}
+
+	return nil
+}
+
+// run attempts to run the scheduled job, guaranteeing only one instance is executing concurrently.
+func (j *Job) run() {
+	defer close(j.done)
+
+	var waitInterval time.Duration
+
+	for {
+		select {
+		case <-j.stop:
+			return
+		case <-time.After(waitInterval):
+		}
+
+		func() {
+			// Acquire the corresponding job lock and hold it throughout execution.
+			j.mutex.Lock()
+			defer j.mutex.Unlock()
+
+			metadata, err := j.readMetadata()
+			if err != nil {
+				j.pluginAPI.LogError("failed to read job metadata", "err", err, "key", j.key)
+				waitInterval = nextWaitInterval(waitInterval, err)
+				return
+			}
+
+			// Is it time to run the job?
+ waitInterval = j.nextWaitInterval(time.Now(), metadata) + if waitInterval > 0 { + return + } + + // Run the job + j.callback() + + metadata.LastFinished = time.Now() + + err = j.saveMetadata(metadata) + if err != nil { + j.pluginAPI.LogError("failed to write job data", "err", err, "key", j.key) + } + + waitInterval = j.nextWaitInterval(time.Now(), metadata) + }() + } +} + +// Close terminates a scheduled job, preventing it from being scheduled on this plugin instance. +func (j *Job) Close() error { + j.stopOnce.Do(func() { + close(j.stop) + }) + <-j.done + + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job_once.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job_once.go new file mode 100644 index 00000000..7b05bd86 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job_once.go @@ -0,0 +1,235 @@ +package cluster + +import ( + "encoding/json" + "math/rand" + "sync" + "time" + + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" +) + +const ( + // oncePrefix is used to namespace key values created for a scheduleOnce job + oncePrefix = "once_" + + // keysPerPage is the maximum number of keys to retrieve from the db per call + keysPerPage = 1000 + + // maxNumFails is the maximum number of KVStore read fails or failed attempts to run the + // callback until the scheduler cancels a job. + maxNumFails = 3 + + // waitAfterFail is the amount of time to wait after a failure + waitAfterFail = 1 * time.Second + + // pollNewJobsInterval is the amount of time to wait between polling the db for new scheduled jobs + pollNewJobsInterval = 5 * time.Minute + + // scheduleOnceJitter is the range of jitter to add to intervals to avoid contention issues + scheduleOnceJitter = 100 * time.Millisecond + + // propsLimit is the maximum length in bytes of the json-representation of a job's props. 
+	// It exists to prevent job goroutines from consuming too much memory, as they are long running.
+	propsLimit = 10000
+)
+
+type JobOnceMetadata struct {
+	Key   string
+	RunAt time.Time
+	Props any
+}
+
+type JobOnce struct {
+	pluginAPI    JobPluginAPI
+	clusterMutex *Mutex
+
+	// key is the original key. It is prefixed with oncePrefix when used as a key in the KVStore
+	key      string
+	props    any
+	runAt    time.Time
+	numFails int
+
+	// done signals the job.run go routine to exit
+	done     chan bool
+	doneOnce sync.Once
+
+	// join is a join point for the job.run() goroutine to join the calling goroutine (in this case,
+	// the one calling job.Cancel)
+	join     chan bool
+	joinOnce sync.Once
+
+	storedCallback *syncedCallback
+	activeJobs     *syncedJobs
+}
+
+// Cancel terminates a scheduled job, preventing it from being scheduled on this plugin instance.
+// It also removes the job from the db, preventing it from being run in the future.
+func (j *JobOnce) Cancel() {
+	j.clusterMutex.Lock()
+	defer j.clusterMutex.Unlock()
+
+	j.cancelWhileHoldingMutex()
+
+	// join the running goroutine
+	j.joinOnce.Do(func() {
+		<-j.join
+	})
+}
+
+func newJobOnce(pluginAPI JobPluginAPI, key string, runAt time.Time, callback *syncedCallback, jobs *syncedJobs, props any) (*JobOnce, error) {
+	mutex, err := NewMutex(pluginAPI, key)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to create job mutex")
+	}
+
+	propsBytes, err := json.Marshal(props)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to marshal props")
+	}
+
+	if len(propsBytes) > propsLimit {
+		return nil, errors.Errorf("props length extends limit")
+	}
+
+	return &JobOnce{
+		pluginAPI:      pluginAPI,
+		clusterMutex:   mutex,
+		key:            key,
+		props:          props,
+		runAt:          runAt,
+		done:           make(chan bool),
+		join:           make(chan bool),
+		storedCallback: callback,
+		activeJobs:     jobs,
+	}, nil
+}
+
+func (j *JobOnce) run() {
+	defer close(j.join)
+
+	wait := time.Until(j.runAt)
+
+	for {
+		select {
+		case <-j.done:
+			return
+		case
<-time.After(wait + addJitter()): + } + + func() { + // Acquire the cluster mutex while we're trying to do the job + j.clusterMutex.Lock() + defer j.clusterMutex.Unlock() + + // Check that the job has not been completed + metadata, err := readMetadata(j.pluginAPI, j.key) + if err != nil { + j.numFails++ + if j.numFails > maxNumFails { + j.cancelWhileHoldingMutex() + return + } + + // wait a bit of time and try again + wait = waitAfterFail + return + } + + // If key doesn't exist, or if the runAt has changed, the original job has been completed already + if metadata == nil || !j.runAt.Equal(metadata.RunAt) { + j.cancelWhileHoldingMutex() + return + } + + j.executeJob() + + j.cancelWhileHoldingMutex() + }() + } +} + +func (j *JobOnce) executeJob() { + j.storedCallback.mu.Lock() + defer j.storedCallback.mu.Unlock() + + j.storedCallback.callback(j.key, j.props) +} + +// readMetadata reads the job's stored metadata. If the caller wishes to make an atomic +// read/write, the cluster mutex for job's key should be held. +func readMetadata(pluginAPI JobPluginAPI, key string) (*JobOnceMetadata, error) { + data, appErr := pluginAPI.KVGet(oncePrefix + key) + if appErr != nil { + return nil, errors.Wrap(normalizeAppErr(appErr), "failed to read data") + } + + if data == nil { + return nil, nil + } + + var metadata JobOnceMetadata + if err := json.Unmarshal(data, &metadata); err != nil { + return nil, errors.Wrap(err, "failed to decode data") + } + + return &metadata, nil +} + +// saveMetadata writes the job's metadata to the kvstore. saveMetadata acquires the job's cluster lock. +// saveMetadata will not overwrite an existing key. 
+func (j *JobOnce) saveMetadata() error { + j.clusterMutex.Lock() + defer j.clusterMutex.Unlock() + + metadata := JobOnceMetadata{ + Key: j.key, + Props: j.props, + RunAt: j.runAt, + } + data, err := json.Marshal(metadata) + if err != nil { + return errors.Wrap(err, "failed to marshal data") + } + + ok, appErr := j.pluginAPI.KVSetWithOptions(oncePrefix+j.key, data, model.PluginKVSetOptions{ + Atomic: true, + OldValue: nil, + }) + if appErr != nil { + return normalizeAppErr(appErr) + } + if !ok { + return errors.New("failed to set data") + } + + return nil +} + +// cancelWhileHoldingMutex assumes the caller holds the job's mutex. +func (j *JobOnce) cancelWhileHoldingMutex() { + // remove the job from the kv store, if it exists + _ = j.pluginAPI.KVDelete(oncePrefix + j.key) + + j.activeJobs.mu.Lock() + defer j.activeJobs.mu.Unlock() + delete(j.activeJobs.jobs, j.key) + + j.doneOnce.Do(func() { + close(j.done) + }) +} + +func addJitter() time.Duration { + return time.Duration(rand.Int63n(int64(scheduleOnceJitter))) +} + +func normalizeAppErr(appErr *model.AppError) error { + if appErr == nil { + return nil + } + + return appErr +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job_once_scheduler.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job_once_scheduler.go new file mode 100644 index 00000000..ca314397 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/job_once_scheduler.go @@ -0,0 +1,236 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package cluster + +import ( + "strings" + "sync" + "time" + + "github.com/pkg/errors" +) + +// syncedCallback uses the mutex to make things predictable for the client: the callback will be +// called once at a time (the client does not need to worry about concurrency within the callback) +type syncedCallback struct { + mu sync.Mutex + callback func(string, any) +} + +type syncedJobs struct { + mu sync.RWMutex + jobs map[string]*JobOnce +} + +type JobOnceScheduler struct { + pluginAPI JobPluginAPI + + startedMu sync.RWMutex + started bool + + activeJobs *syncedJobs + storedCallback *syncedCallback +} + +var schedulerOnce sync.Once +var s *JobOnceScheduler + +// GetJobOnceScheduler returns a scheduler which is ready to have its callback set. Repeated +// calls will return the same scheduler. +func GetJobOnceScheduler(pluginAPI JobPluginAPI) *JobOnceScheduler { + schedulerOnce.Do(func() { + s = &JobOnceScheduler{ + pluginAPI: pluginAPI, + activeJobs: &syncedJobs{ + jobs: make(map[string]*JobOnce), + }, + storedCallback: &syncedCallback{}, + } + }) + return s +} + +// Start starts the Scheduler. It finds all previous ScheduleOnce jobs and starts them running, and +// fires any jobs that have reached or exceeded their runAt time. Thus, even if a cluster goes down +// and is restarted, Start will restart previously scheduled jobs. +func (s *JobOnceScheduler) Start() error { + s.startedMu.Lock() + defer s.startedMu.Unlock() + if s.started { + return errors.New("scheduler has already been started") + } + + if err := s.verifyCallbackExists(); err != nil { + return errors.Wrap(err, "callback not found; cannot start scheduler") + } + + if err := s.scheduleNewJobsFromDB(); err != nil { + return errors.Wrap(err, "could not start JobOnceScheduler due to error") + } + + go s.pollForNewScheduledJobs() + + s.started = true + + return nil +} + +// SetCallback sets the scheduler's callback. When a job fires, the callback will be called with +// the job's id. 
+func (s *JobOnceScheduler) SetCallback(callback func(string, any)) error { + if callback == nil { + return errors.New("callback cannot be nil") + } + + s.storedCallback.mu.Lock() + defer s.storedCallback.mu.Unlock() + + s.storedCallback.callback = callback + return nil +} + +// ListScheduledJobs returns a list of the jobs in the db that have been scheduled. There is no +// guarantee that list is accurate by the time the caller reads the list. E.g., the jobs in the list +// may have been run, canceled, or new jobs may have scheduled. +func (s *JobOnceScheduler) ListScheduledJobs() ([]JobOnceMetadata, error) { + var ret []JobOnceMetadata + for i := 0; ; i++ { + keys, err := s.pluginAPI.KVList(i, keysPerPage) + if err != nil { + return nil, errors.Wrap(err, "error getting KVList") + } + for _, k := range keys { + if strings.HasPrefix(k, oncePrefix) { + metadata, err := readMetadata(s.pluginAPI, k[len(oncePrefix):]) + if err != nil { + s.pluginAPI.LogError(errors.Wrap(err, "could not retrieve data from plugin kvstore for key: "+k).Error()) + continue + } + if metadata == nil { + continue + } + + ret = append(ret, *metadata) + } + } + + if len(keys) < keysPerPage { + break + } + } + + return ret, nil +} + +// ScheduleOnce creates a scheduled job that will run once. When the clock reaches runAt, the +// callback will be called with key and props as the argument. +// +// If the job key already exists in the db, this will return an error. To reschedule a job, first +// cancel the original then schedule it again. 
+func (s *JobOnceScheduler) ScheduleOnce(key string, runAt time.Time, props any) (*JobOnce, error) { + s.startedMu.RLock() + defer s.startedMu.RUnlock() + if !s.started { + return nil, errors.New("start the scheduler before adding jobs") + } + + job, err := newJobOnce(s.pluginAPI, key, runAt, s.storedCallback, s.activeJobs, props) + if err != nil { + return nil, errors.Wrap(err, "could not create new job") + } + + if err = job.saveMetadata(); err != nil { + return nil, errors.Wrap(err, "could not save job metadata") + } + + s.runAndTrack(job) + + return job, nil +} + +// Cancel cancels a job by its key. This is useful if the plugin lost the original *JobOnce, or +// is stopping a job found in ListScheduledJobs(). +func (s *JobOnceScheduler) Cancel(key string) { + // using an anonymous function because job.Close() below needs access to the activeJobs mutex + job := func() *JobOnce { + s.activeJobs.mu.RLock() + defer s.activeJobs.mu.RUnlock() + j, ok := s.activeJobs.jobs[key] + if ok { + return j + } + + // Job wasn't active, so no need to call CancelWhileHoldingMutex (which shuts down the + // goroutine). There's a condition where another server in the cluster started the job, and + // the current server hasn't polled for it yet. To solve that case, delete it from the db. 
+ mutex, err := NewMutex(s.pluginAPI, key) + if err != nil { + s.pluginAPI.LogError(errors.Wrap(err, "failed to create job mutex in Cancel for key: "+key).Error()) + } + mutex.Lock() + defer mutex.Unlock() + + _ = s.pluginAPI.KVDelete(oncePrefix + key) + + return nil + }() + + if job != nil { + job.Cancel() + } +} + +func (s *JobOnceScheduler) scheduleNewJobsFromDB() error { + scheduled, err := s.ListScheduledJobs() + if err != nil { + return errors.Wrap(err, "could not read scheduled jobs from db") + } + + for _, m := range scheduled { + job, err := newJobOnce(s.pluginAPI, m.Key, m.RunAt, s.storedCallback, s.activeJobs, m.Props) + if err != nil { + s.pluginAPI.LogError(errors.Wrap(err, "could not create new job for key: "+m.Key).Error()) + continue + } + + s.runAndTrack(job) + } + + return nil +} + +func (s *JobOnceScheduler) runAndTrack(job *JobOnce) { + s.activeJobs.mu.Lock() + defer s.activeJobs.mu.Unlock() + + // has this been scheduled already on this server? + if _, ok := s.activeJobs.jobs[job.key]; ok { + return + } + + go job.run() + + s.activeJobs.jobs[job.key] = job +} + +// pollForNewScheduledJobs will only be started once per plugin. It doesn't need to be stopped. 
+func (s *JobOnceScheduler) pollForNewScheduledJobs() { + for { + <-time.After(pollNewJobsInterval + addJitter()) + + if err := s.scheduleNewJobsFromDB(); err != nil { + s.pluginAPI.LogError("pluginAPI scheduleOnce poller encountered an error but is still polling", "error", err) + } + } +} + +func (s *JobOnceScheduler) verifyCallbackExists() error { + s.storedCallback.mu.Lock() + defer s.storedCallback.mu.Unlock() + + if s.storedCallback.callback == nil { + return errors.New("set callback before starting the scheduler") + } + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/mutex.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/mutex.go new file mode 100644 index 00000000..5b337c81 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/mutex.go @@ -0,0 +1,185 @@ +package cluster + +import ( + "context" + "sync" + "time" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/pkg/errors" +) + +const ( + // mutexPrefix is used to namespace key values created for a mutex from other key values + // created by a plugin. + mutexPrefix = "mutex_" +) + +const ( + // ttl is the interval after which a locked mutex will expire unless refreshed + ttl = time.Second * 15 + + // refreshInterval is the interval on which the mutex will be refreshed when locked + refreshInterval = ttl / 2 +) + +// MutexPluginAPI is the plugin API interface required to manage mutexes. +type MutexPluginAPI interface { + KVSetWithOptions(key string, value []byte, options model.PluginKVSetOptions) (bool, *model.AppError) + LogError(msg string, keyValuePairs ...any) +} + +// Mutex is similar to sync.Mutex, except usable by multiple plugin instances across a cluster. +// +// Internally, a mutex relies on an atomic key-value set operation as exposed by the Mattermost +// plugin API. +// +// Mutexes with different names are unrelated. 
Mutexes with the same name from different plugins +// are unrelated. Pick a unique name for each mutex your plugin requires. +// +// A Mutex must not be copied after first use. +type Mutex struct { + pluginAPI MutexPluginAPI + key string + + // lock guards the variables used to manage the refresh task, and is not itself related to + // the cluster-wide lock. + lock sync.Mutex + stopRefresh chan bool + refreshDone chan bool +} + +// NewMutex creates a mutex with the given key name. +// +// Panics if key is empty. +func NewMutex(pluginAPI MutexPluginAPI, key string) (*Mutex, error) { + key, err := makeLockKey(key) + if err != nil { + return nil, err + } + + return &Mutex{ + pluginAPI: pluginAPI, + key: key, + }, nil +} + +// makeLockKey returns the prefixed key used to namespace mutex keys. +func makeLockKey(key string) (string, error) { + if key == "" { + return "", errors.New("must specify valid mutex key") + } + + return mutexPrefix + key, nil +} + +// lock makes a single attempt to atomically lock the mutex, returning true only if successful. +func (m *Mutex) tryLock() (bool, error) { + ok, err := m.pluginAPI.KVSetWithOptions(m.key, []byte{1}, model.PluginKVSetOptions{ + Atomic: true, + OldValue: nil, // No existing key value. + ExpireInSeconds: int64(ttl / time.Second), + }) + if err != nil { + return false, errors.Wrap(err, "failed to set mutex kv") + } + + return ok, nil +} + +// refreshLock rewrites the lock key value with a new expiry, returning true only if successful. +func (m *Mutex) refreshLock() error { + ok, err := m.pluginAPI.KVSetWithOptions(m.key, []byte{1}, model.PluginKVSetOptions{ + Atomic: true, + OldValue: []byte{1}, + ExpireInSeconds: int64(ttl / time.Second), + }) + if err != nil { + return errors.Wrap(err, "failed to refresh mutex kv") + } else if !ok { + return errors.New("unexpectedly failed to refresh mutex kv") + } + + return nil +} + +// Lock locks m. 
If the mutex is already locked by any plugin instance, including the current one, +// the calling goroutine blocks until the mutex can be locked. +func (m *Mutex) Lock() { + _ = m.LockWithContext(context.Background()) +} + +// LockWithContext locks m unless the context is canceled. If the mutex is already locked by any plugin +// instance, including the current one, the calling goroutine blocks until the mutex can be locked, +// or the context is canceled. +// +// The mutex is locked only if a nil error is returned. +func (m *Mutex) LockWithContext(ctx context.Context) error { + var waitInterval time.Duration + + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(waitInterval): + } + + locked, err := m.tryLock() + if err != nil { + m.pluginAPI.LogError("failed to lock mutex", "err", err, "lock_key", m.key) + waitInterval = nextWaitInterval(waitInterval, err) + continue + } else if !locked { + waitInterval = nextWaitInterval(waitInterval, err) + continue + } + + stop := make(chan bool) + done := make(chan bool) + go func() { + defer close(done) + t := time.NewTicker(refreshInterval) + for { + select { + case <-t.C: + err := m.refreshLock() + if err != nil { + m.pluginAPI.LogError("failed to refresh mutex", "err", err, "lock_key", m.key) + return + } + case <-stop: + return + } + } + }() + + m.lock.Lock() + m.stopRefresh = stop + m.refreshDone = done + m.lock.Unlock() + + return nil + } +} + +// Unlock unlocks m. It is a run-time error if m is not locked on entry to Unlock. +// +// Just like sync.Mutex, a locked Lock is not associated with a particular goroutine or plugin +// instance. It is allowed for one goroutine or plugin instance to lock a Lock and then arrange +// for another goroutine or plugin instance to unlock it. In practice, ownership of the lock should +// remain within a single plugin instance. 
+func (m *Mutex) Unlock() { + m.lock.Lock() + if m.stopRefresh == nil { + m.lock.Unlock() + panic("mutex has not been acquired") + } + + close(m.stopRefresh) + m.stopRefresh = nil + <-m.refreshDone + m.lock.Unlock() + + // If an error occurs deleting, the mutex kv will still expire, allowing later retry. + _, _ = m.pluginAPI.KVSetWithOptions(m.key, nil, model.PluginKVSetOptions{}) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/wait.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/wait.go new file mode 100644 index 00000000..bf62b4ac --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/cluster/wait.go @@ -0,0 +1,43 @@ +package cluster + +import ( + "math/rand" + "time" +) + +const ( + // minWaitInterval is the minimum amount of time to wait between locking attempts + minWaitInterval = 1 * time.Second + + // maxWaitInterval is the maximum amount of time to wait between locking attempts + maxWaitInterval = 5 * time.Minute + + // pollWaitInterval is the usual time to wait between unsuccessful locking attempts + pollWaitInterval = 1 * time.Second + + // jitterWaitInterval is the amount of jitter to add when waiting to avoid thundering herds + jitterWaitInterval = minWaitInterval / 2 +) + +// nextWaitInterval determines how long to wait until the next lock retry. +func nextWaitInterval(lastWaitInterval time.Duration, err error) time.Duration { + nextWaitInterval := lastWaitInterval + + if nextWaitInterval <= 0 { + nextWaitInterval = minWaitInterval + } + + if err != nil { + nextWaitInterval *= 2 + if nextWaitInterval > maxWaitInterval { + nextWaitInterval = maxWaitInterval + } + } else { + nextWaitInterval = pollWaitInterval + } + + // Add some jitter to avoid unnecessary collision between competing plugin instances. 
+ nextWaitInterval += time.Duration(rand.Int63n(int64(jitterWaitInterval)) - int64(jitterWaitInterval)/2) + + return nextWaitInterval +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/configuration.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/configuration.go new file mode 100644 index 00000000..b78f0634 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/configuration.go @@ -0,0 +1,55 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// ConfigurationService exposes methods to manipulate the server and plugin configuration. +type ConfigurationService struct { + api plugin.API +} + +// LoadPluginConfiguration loads the plugin's configuration. dest should be a pointer to a +// struct to which the configuration JSON can be unmarshalled. +// +// Minimum server version: 5.2 +func (c *ConfigurationService) LoadPluginConfiguration(dest any) error { + // TODO: Isn't this method redundant given GetPluginConfig() and even GetConfig()? + return c.api.LoadPluginConfiguration(dest) +} + +// GetConfig fetches the currently persisted config. +// +// Minimum server version: 5.2 +func (c *ConfigurationService) GetConfig() *model.Config { + return c.api.GetConfig() +} + +// GetUnsanitizedConfig fetches the currently persisted config without removing secrets. 
+// +// Minimum server version: 5.16 +func (c *ConfigurationService) GetUnsanitizedConfig() *model.Config { + return c.api.GetUnsanitizedConfig() +} + +// SaveConfig sets the given config and persists the changes +// +// Minimum server version: 5.2 +func (c *ConfigurationService) SaveConfig(cfg *model.Config) error { + return normalizeAppErr(c.api.SaveConfig(cfg)) +} + +// GetPluginConfig fetches the currently persisted config of plugin +// +// Minimum server version: 5.6 +func (c *ConfigurationService) GetPluginConfig() map[string]any { + return c.api.GetPluginConfig() +} + +// SavePluginConfig sets the given config for plugin and persists the changes +// +// Minimum server version: 5.6 +func (c *ConfigurationService) SavePluginConfig(cfg map[string]any) error { + return normalizeAppErr(c.api.SavePluginConfig(cfg)) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/email.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/email.go new file mode 100644 index 00000000..80a4d97c --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/email.go @@ -0,0 +1,17 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/plugin" +) + +// MailService exposes methods to send email. +type MailService struct { + api plugin.API +} + +// Send sends an email to a specific address. 
+// +// Minimum server version: 5.7 +func (m *MailService) Send(to, subject, htmlBody string) error { + return normalizeAppErr(m.api.SendMail(to, subject, htmlBody)) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/emoji.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/emoji.go new file mode 100644 index 00000000..ec21de80 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/emoji.go @@ -0,0 +1,54 @@ +package pluginapi + +import ( + "bytes" + "io" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// EmojiService exposes methods to manipulate emojis. +type EmojiService struct { + api plugin.API +} + +// Get gets a custom emoji by id. +// +// Minimum server version: 5.6 +func (e *EmojiService) Get(id string) (*model.Emoji, error) { + emoji, appErr := e.api.GetEmoji(id) + + return emoji, normalizeAppErr(appErr) +} + +// GetByName gets a custom emoji by its name. +// +// Minimum server version: 5.6 +func (e *EmojiService) GetByName(name string) (*model.Emoji, error) { + emoji, appErr := e.api.GetEmojiByName(name) + + return emoji, normalizeAppErr(appErr) +} + +// GetImage gets a custom emoji's content and format by id. +// +// Minimum server version: 5.6 +func (e *EmojiService) GetImage(id string) (io.Reader, string, error) { + contentBytes, format, appErr := e.api.GetEmojiImage(id) + if appErr != nil { + return nil, "", normalizeAppErr(appErr) + } + + return bytes.NewReader(contentBytes), format, nil +} + +// List retrieves a list of custom emojis. +// sortBy parameter can be: "name". 
+// +// Minimum server version: 5.6 +func (e *EmojiService) List(sortBy string, page, count int) ([]*model.Emoji, error) { + emojis, appErr := e.api.GetEmojiList(sortBy, page, count) + + return emojis, normalizeAppErr(appErr) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/error.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/error.go new file mode 100644 index 00000000..32765915 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/error.go @@ -0,0 +1,39 @@ +package pluginapi + +import ( + "net/http" + + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" +) + +// ErrNotFound is returned by the plugin API when an object is not found. +var ErrNotFound = errors.New("not found") + +// normalizeAppErr returns a truly nil error if appErr is nil as well as normalizing a class +// of non-nil AppErrors to simplify use within plugins. +// +// This doesn't happen automatically when a *model.AppError is cast to an error, since the +// resulting error interface has a concrete type with a nil value. This leads to the seemingly +// impossible: +// +// var err error +// err = func() *model.AppError { return nil }() +// if err != nil { +// panic("err != nil, which surprises most") +// } +// +// Fix this problem for all plugin authors by normalizing to special case the handling of a nil +// *model.AppError. See https://golang.org/doc/faq#nil_error for more details. 
+func normalizeAppErr(appErr *model.AppError) error {
+	if appErr == nil {
+		return nil
+	}
+
+	if appErr.StatusCode == http.StatusNotFound {
+		return ErrNotFound
+	}
+
+	return appErr
+}
diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/file.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/file.go
new file mode 100644
index 00000000..5e33627e
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/file.go
@@ -0,0 +1,93 @@
+package pluginapi
+
+import (
+	"bytes"
+	"io"
+
+	"github.com/mattermost/mattermost/server/public/model"
+	"github.com/mattermost/mattermost/server/public/plugin"
+)
+
+// FileService exposes methods to manipulate files, most often as post attachments.
+type FileService struct {
+	api plugin.API
+}
+
+// Get gets content of a file by id.
+//
+// Minimum server version: 5.8
+func (f *FileService) Get(id string) (io.Reader, error) {
+	contentBytes, appErr := f.api.GetFile(id)
+	if appErr != nil {
+		return nil, normalizeAppErr(appErr)
+	}
+
+	return bytes.NewReader(contentBytes), nil
+}
+
+// GetByPath reads a file by its path on the disk.
+//
+// Minimum server version: 5.3
+func (f *FileService) GetByPath(path string) (io.Reader, error) {
+	contentBytes, appErr := f.api.ReadFile(path)
+	if appErr != nil {
+		return nil, normalizeAppErr(appErr)
+	}
+
+	return bytes.NewReader(contentBytes), nil
+}
+
+// GetInfo gets a file's info by id.
+//
+// Minimum server version: 5.3
+func (f *FileService) GetInfo(id string) (*model.FileInfo, error) {
+	info, appErr := f.api.GetFileInfo(id)
+
+	return info, normalizeAppErr(appErr)
+}
+
+// SetSearchableContent updates the File Info searchable text for full text search
+//
+// Minimum server version: 9.1
+func (f *FileService) SetSearchableContent(id string, content string) error {
+	appErr := f.api.SetFileSearchableContent(id, content)
+	return normalizeAppErr(appErr)
+}
+
+// GetLink gets the public link of a file by id.
+// +// Minimum server version: 5.6 +func (f *FileService) GetLink(id string) (string, error) { + link, appErr := f.api.GetFileLink(id) + + return link, normalizeAppErr(appErr) +} + +// Upload uploads a file to a channel to be later attached to a post. +// +// Minimum server version: 5.6 +func (f *FileService) Upload(content io.Reader, fileName, channelID string) (*model.FileInfo, error) { + contentBytes, err := io.ReadAll(content) + if err != nil { + return nil, err + } + + info, appErr := f.api.UploadFile(contentBytes, channelID, fileName) + + return info, normalizeAppErr(appErr) +} + +// CopyInfos duplicates the FileInfo objects referenced by the given file ids, recording +// the given user id as the new creator and returning the new set of file ids. +// +// The duplicate FileInfo objects are not initially linked to a post, but may now be passed +// on creation of a post. +// Use this API to duplicate a post and its file attachments without actually duplicating +// the uploaded files. +// +// Minimum server version: 5.2 +func (f *FileService) CopyInfos(ids []string, userID string) ([]string, error) { + newIDs, appErr := f.api.CopyFileInfos(userID, ids) + + return newIDs, normalizeAppErr(appErr) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/frontend.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/frontend.go new file mode 100644 index 00000000..a0382318 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/frontend.go @@ -0,0 +1,30 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// FrontendService exposes methods to interact with the frontend. +type FrontendService struct { + api plugin.API +} + +// OpenInteractiveDialog will open an interactive dialog on a user's client that +// generated the trigger ID. Used with interactive message buttons, menus +// and slash commands. 
+// +// Minimum server version: 5.6 +func (f *FrontendService) OpenInteractiveDialog(dialog model.OpenDialogRequest) error { + return normalizeAppErr(f.api.OpenInteractiveDialog(dialog)) +} + +// PublishWebSocketEvent sends an event to WebSocket connections. +// event is the type and will be prepended with "custom__". +// payload is the data sent with the event. Interface values must be primitive Go types or mattermost-server/model types. +// broadcast determines to which users to send the event. +// +// Minimum server version: 5.2 +func (f *FrontendService) PublishWebSocketEvent(event string, payload map[string]any, broadcast *model.WebsocketBroadcast) { + f.api.PublishWebSocketEvent(event, payload, broadcast) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/group.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/group.go new file mode 100644 index 00000000..284644f6 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/group.go @@ -0,0 +1,185 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// GroupService exposes methods to manipulate groups. +type GroupService struct { + api plugin.API +} + +// Get gets a group by ID. +// +// Minimum server version: 5.18 +func (g *GroupService) Get(groupID string) (*model.Group, error) { + group, appErr := g.api.GetGroup(groupID) + + return group, normalizeAppErr(appErr) +} + +// GetByName gets a group by name. +// +// Minimum server version: 5.18 +func (g *GroupService) GetByName(name string) (*model.Group, error) { + group, appErr := g.api.GetGroupByName(name) + + return group, normalizeAppErr(appErr) +} + +// GetMemberUsers gets a page of users from the given group. 
+// +// Minimum server version: 5.35 +func (g *GroupService) GetMemberUsers(groupID string, page, perPage int) ([]*model.User, error) { + users, appErr := g.api.GetGroupMemberUsers(groupID, page, perPage) + + return users, normalizeAppErr(appErr) +} + +// GetBySource gets a list of all groups for the given source. +// +// @tag Group +// Minimum server version: 5.35 +func (g *GroupService) GetBySource(groupSource model.GroupSource) ([]*model.Group, error) { + groups, appErr := g.api.GetGroupsBySource(groupSource) + + return groups, normalizeAppErr(appErr) +} + +// ListForUser gets the groups a user is in. +// +// Minimum server version: 5.18 +func (g *GroupService) ListForUser(userID string) ([]*model.Group, error) { + groups, appErr := g.api.GetGroupsForUser(userID) + + return groups, normalizeAppErr(appErr) +} + +// UpsertMember adds a user to a group or updates their existing membership. +// +// Minimum server version: 10.7 +func (g *GroupService) UpsertMember(groupID string, userID string) (*model.GroupMember, error) { + member, appErr := g.api.UpsertGroupMember(groupID, userID) + return member, normalizeAppErr(appErr) +} + +// UpsertMembers adds multiple users to a group or updates their existing memberships. +// +// Minimum server version: 10.7 +func (g *GroupService) UpsertMembers(groupID string, userIDs []string) ([]*model.GroupMember, error) { + members, appErr := g.api.UpsertGroupMembers(groupID, userIDs) + return members, normalizeAppErr(appErr) +} + +// GetByRemoteID gets a group by its remote ID. +// +// Minimum server version: 10.7 +func (g *GroupService) GetByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, error) { + group, appErr := g.api.GetGroupByRemoteID(remoteID, groupSource) + return group, normalizeAppErr(appErr) +} + +// Create creates a new group. 
+// +// Minimum server version: 10.7 +func (g *GroupService) Create(group *model.Group) (*model.Group, error) { + group, appErr := g.api.CreateGroup(group) + return group, normalizeAppErr(appErr) +} + +// Update updates a group. +// +// Minimum server version: 10.7 +func (g *GroupService) Update(group *model.Group) (*model.Group, error) { + group, appErr := g.api.UpdateGroup(group) + return group, normalizeAppErr(appErr) +} + +// Delete soft deletes a group. +// +// Minimum server version: 10.7 +func (g *GroupService) Delete(groupID string) (*model.Group, error) { + group, appErr := g.api.DeleteGroup(groupID) + return group, normalizeAppErr(appErr) +} + +// Restore restores a soft deleted group. +// +// Minimum server version: 10.7 +func (g *GroupService) Restore(groupID string) (*model.Group, error) { + group, appErr := g.api.RestoreGroup(groupID) + return group, normalizeAppErr(appErr) +} + +// DeleteMember removes a user from a group. +// +// Minimum server version: 10.7 +func (g *GroupService) DeleteMember(groupID string, userID string) (*model.GroupMember, error) { + member, appErr := g.api.DeleteGroupMember(groupID, userID) + return member, normalizeAppErr(appErr) +} + +// GetSyncable gets a group syncable. +// +// Minimum server version: 10.7 +func (g *GroupService) GetSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, error) { + syncable, appErr := g.api.GetGroupSyncable(groupID, syncableID, syncableType) + return syncable, normalizeAppErr(appErr) +} + +// GetSyncables gets all group syncables for the given group. +// +// Minimum server version: 10.7 +func (g *GroupService) GetSyncables(groupID string, syncableType model.GroupSyncableType) ([]*model.GroupSyncable, error) { + syncables, appErr := g.api.GetGroupSyncables(groupID, syncableType) + return syncables, normalizeAppErr(appErr) +} + +// UpsertSyncable creates or updates a group syncable. 
+// +// Minimum server version: 10.7 +func (g *GroupService) UpsertSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, error) { + syncable, appErr := g.api.UpsertGroupSyncable(groupSyncable) + return syncable, normalizeAppErr(appErr) +} + +// UpdateSyncable updates a group syncable. +// +// Minimum server version: 10.7 +func (g *GroupService) UpdateSyncable(groupSyncable *model.GroupSyncable) (*model.GroupSyncable, error) { + syncable, appErr := g.api.UpdateGroupSyncable(groupSyncable) + return syncable, normalizeAppErr(appErr) +} + +// DeleteSyncable deletes a group syncable. +// +// Minimum server version: 10.7 +func (g *GroupService) DeleteSyncable(groupID string, syncableID string, syncableType model.GroupSyncableType) (*model.GroupSyncable, error) { + syncable, appErr := g.api.DeleteGroupSyncable(groupID, syncableID, syncableType) + return syncable, normalizeAppErr(appErr) +} + +// GetGroups returns a list of all groups with the given options and restrictions. +// +// Minimum server version: 10.7 +func (g *GroupService) GetGroups(page, perPage int, opts model.GroupSearchOpts, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, error) { + groups, appErr := g.api.GetGroups(page, perPage, opts, viewRestrictions) + return groups, normalizeAppErr(appErr) +} + +// CreateDefaultSyncableMemberships creates default syncable memberships based off the provided parameters. +// +// Minimum server version: 10.9 +func (g *GroupService) CreateDefaultSyncableMemberships(params model.CreateDefaultMembershipParams) error { + appErr := g.api.CreateDefaultSyncableMemberships(params) + return normalizeAppErr(appErr) +} + +// DeleteGroupConstrainedMemberships deletes team and channel memberships of users who aren't members of the allowed groups of all group-constrained teams and channels. 
+// +// Minimum server version: 10.9 +func (g *GroupService) DeleteGroupConstrainedMemberships() error { + appErr := g.api.DeleteGroupConstrainedMemberships() + return normalizeAppErr(appErr) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/kv.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/kv.go new file mode 100644 index 00000000..4e7e45d0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/kv.go @@ -0,0 +1,278 @@ +package pluginapi + +import ( + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// numRetries is the number of times the setAtomicWithRetries will retry before returning an error. +const numRetries = 5 + +// KVService exposes methods to read and write key-value pairs for the active plugin. +// +// This service cannot be used to read or write key-value pairs for other plugins. +type KVService struct { + api plugin.API +} + +type KVSetOptions struct { + model.PluginKVSetOptions + oldValue any +} + +// KVSetOption is an option passed to Set() operation. +type KVSetOption func(*KVSetOptions) + +// SetAtomic guarantees the write will occur only when the current value of matches the given old +// value. A client is expected to read the old value first, then pass it back to ensure the value +// has not since been modified. +func SetAtomic(oldValue any) KVSetOption { + return func(o *KVSetOptions) { + o.Atomic = true + o.oldValue = oldValue + } +} + +// SetExpiry configures a key value to expire after the given duration relative to now. +func SetExpiry(ttl time.Duration) KVSetOption { + return func(o *KVSetOptions) { + o.ExpireInSeconds = int64(ttl / time.Second) + } +} + +// Set stores a key-value pair, unique per plugin. +// Keys prefixed with `mmi_` are reserved for internal use and will fail to be set. 
+// +// Returns (false, err) if DB error occurred +// Returns (false, nil) if the value was not set +// Returns (true, nil) if the value was set +// +// Minimum server version: 5.18 +func (k *KVService) Set(key string, value any, options ...KVSetOption) (bool, error) { + if strings.HasPrefix(key, internalKeyPrefix) { + return false, errors.Errorf("'%s' prefix is not allowed for keys", internalKeyPrefix) + } + + opts := KVSetOptions{} + for _, o := range options { + o(&opts) + } + + var valueBytes []byte + if value != nil { + // Assume JSON encoding, unless explicitly given a byte slice. + var isValueInBytes bool + valueBytes, isValueInBytes = value.([]byte) + if !isValueInBytes { + var err error + valueBytes, err = json.Marshal(value) + if err != nil { + return false, errors.Wrapf(err, "failed to marshal value %v", value) + } + } + } + + downstreamOpts := model.PluginKVSetOptions{ + Atomic: opts.Atomic, + ExpireInSeconds: opts.ExpireInSeconds, + } + + if opts.oldValue != nil { + oldValueBytes, isOldValueInBytes := opts.oldValue.([]byte) + if isOldValueInBytes { + downstreamOpts.OldValue = oldValueBytes + } else { + data, err := json.Marshal(opts.oldValue) + if err != nil { + return false, errors.Wrapf(err, "failed to marshal value %v", opts.oldValue) + } + + downstreamOpts.OldValue = data + } + } + + written, appErr := k.api.KVSetWithOptions(key, valueBytes, downstreamOpts) + return written, normalizeAppErr(appErr) +} + +// SetAtomicWithRetries will set a key-value pair atomically using compare and set semantics: +// it will read key's value (to get oldValue), perform valueFunc (to get newValue), +// and compare and set (comparing oldValue and setting newValue). +// +// Parameters: +// +// `key` is the key to get and set. +// `valueFunc` is a user-provided function that will take the old value as a []byte and +// return the new value or an error. 
If valueFunc needs to operate on +// oldValue, it will need to use the oldValue as a []byte, or convert +// oldValue into the expected type (e.g., by parsing it, or marshaling it +// into the expected struct). It should then return the newValue as the type +// expected to be stored. +// +// Returns: +// +// Returns err if the key could not be retrieved (DB error), valueFunc returned an error, +// if the key could not be set (DB error), or if the key could not be set (after retries). +// Returns nil if the value was set. +// +// Minimum server version: 5.18 +func (k *KVService) SetAtomicWithRetries(key string, valueFunc func(oldValue []byte) (newValue any, err error)) error { + for range numRetries { + var oldVal []byte + if err := k.Get(key, &oldVal); err != nil { + return errors.Wrapf(err, "failed to get value for key %s", key) + } + + newVal, err := valueFunc(oldVal) + if err != nil { + return errors.Wrap(err, "valueFunc failed") + } + + if saved, err := k.Set(key, newVal, SetAtomic(oldVal)); err != nil { + return errors.Wrapf(err, "DB failed to set value for key %s", key) + } else if saved { + return nil + } + + // small delay to allow cooperative scheduling to do its thing + time.Sleep(10 * time.Millisecond) + } + return fmt.Errorf("failed to set value after %d retries", numRetries) +} + +// Get gets the value for the given key into the given interface. +// +// An error is returned only if the value cannot be fetched. A non-existent key will return no +// error, with nothing written to the given interface. 
+// +// Minimum server version: 5.2 +func (k *KVService) Get(key string, o any) error { + data, appErr := k.api.KVGet(key) + if appErr != nil { + return normalizeAppErr(appErr) + } + + if len(data) == 0 { + return nil + } + + if bytesOut, ok := o.(*[]byte); ok { + *bytesOut = data + return nil + } + + if err := json.Unmarshal(data, o); err != nil { + return errors.Wrapf(err, "failed to unmarshal value for key %s", key) + } + + return nil +} + +// Delete deletes the given key-value pair. +// +// An error is returned only if the value failed to be deleted. A non-existent key will return +// no error. +// +// Minimum server version: 5.18 +func (k *KVService) Delete(key string) error { + _, err := k.Set(key, nil) + return err +} + +// DeleteAll removes all key-value pairs. +// +// Minimum server version: 5.6 +func (k *KVService) DeleteAll() error { + return normalizeAppErr(k.api.KVDeleteAll()) +} + +// ListKeysOption used to configure a ListKeys() operation. +type ListKeysOption func(*listKeysOptions) + +// listKeysOptions holds configurations of a ListKeys() operation. +type listKeysOptions struct { + checkers []func(key string) (keep bool, err error) +} + +func (o *listKeysOptions) checkAll(key string) (keep bool, err error) { + for _, check := range o.checkers { + keep, err := check(key) + if err != nil { + return false, err + } + if !keep { + return false, nil + } + } + + // key made it through all checkers + return true, nil +} + +// WithPrefix only return keys that start with the given string. +func WithPrefix(prefix string) ListKeysOption { + return WithChecker(func(key string) (keep bool, err error) { + return strings.HasPrefix(key, prefix), nil + }) +} + +// WithChecker allows for a custom filter function to determine which keys to return. +// Returning true will keep the key and false will filter it out. Returning an error +// will halt KVListWithOptions immediately and pass the error up (with no other results). 
+func WithChecker(f func(key string) (keep bool, err error)) ListKeysOption {
+	return func(args *listKeysOptions) {
+		args.checkers = append(args.checkers, f)
+	}
+}
+
+// ListKeys lists all keys that match the given options. If no options are provided then all keys are returned.
+//
+// Minimum server version: 5.6
+func (k *KVService) ListKeys(page, count int, options ...ListKeysOption) ([]string, error) {
+	// convert functional options into args struct
+	args := &listKeysOptions{
+		checkers: nil,
+	}
+	for _, opt := range options {
+		opt(args)
+	}
+
+	// get our keys a batch at a time, filter out the ones we don't want based on our args
+	// any errors will halt the whole process and return the error raw
+
+	keys, appErr := k.api.KVList(page, count)
+	if appErr != nil {
+		return nil, normalizeAppErr(appErr)
+	}
+
+	if len(args.checkers) == 0 {
+		// no checkers, just return the keys
+		return keys, nil
+	}
+
+	ret := make([]string, 0)
+	// we have a filter, so check each key, all checkers must say keep
+	// for us to keep a key
+	for _, key := range keys {
+		keep, err := args.checkAll(key)
+		if err != nil {
+			return nil, err
+		}
+
+		if !keep {
+			continue
+		}
+
+		// didn't get filtered out, add to our return
+		ret = append(ret, key)
+	}
+
+	return ret, nil
+}
diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/kv_memory.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/kv_memory.go
new file mode 100644
index 00000000..ecb362aa
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/kv_memory.go
@@ -0,0 +1,283 @@
+package pluginapi
+
+import (
+	"bytes"
+	"encoding/json"
+	"slices"
+	"strings"
+	"sync"
+	"time"
+	"unicode/utf8"
+
+	"github.com/mattermost/mattermost/server/public/model"
+
+	"github.com/pkg/errors"
+)
+
+// MemoryStore is an implementation of the plugin KV store API for testing.
+// It's not meant for production use.
+// It's safe for concurrent use by multiple goroutines.
+type MemoryStore struct { + mux sync.RWMutex + elems map[string]kvElem +} + +type kvElem struct { + value []byte + expiresAt *time.Time +} + +func (e kvElem) isExpired() bool { + return e.expiresAt != nil && e.expiresAt.Before(time.Now()) +} + +// Set stores a key-value pair, unique per plugin. +// Keys prefixed with `mmi_` are reserved for internal use and will fail to be set. +// +// Returns (false, err) if DB error occurred +// Returns (false, nil) if the value was not set +// Returns (true, nil) if the value was set +func (s *MemoryStore) Set(key string, value any, options ...KVSetOption) (bool, error) { + if key == "" { + return false, errors.New("key must not be empty") + } + + if strings.HasPrefix(key, internalKeyPrefix) { + return false, errors.Errorf("'%s' prefix is not allowed for keys", internalKeyPrefix) + } + + if utf8.RuneCountInString(key) > model.KeyValueKeyMaxRunes { + return false, errors.Errorf("key must not be longer then %d", model.KeyValueKeyMaxRunes) + } + + opts := KVSetOptions{} + for _, o := range options { + if o != nil { + o(&opts) + } + } + + var valueBytes []byte + if value != nil { + // Assume JSON encoding, unless explicitly given a byte slice. 
+ var isValueInBytes bool + valueBytes, isValueInBytes = value.([]byte) + if !isValueInBytes { + var err error + valueBytes, err = json.Marshal(value) + if err != nil { + return false, errors.Wrapf(err, "failed to marshal value %v", value) + } + } + } + + downstreamOpts := model.PluginKVSetOptions{ + Atomic: opts.Atomic, + ExpireInSeconds: opts.ExpireInSeconds, + } + + if opts.oldValue != nil { + oldValueBytes, isOldValueInBytes := opts.oldValue.([]byte) + if isOldValueInBytes { + downstreamOpts.OldValue = oldValueBytes + } else { + data, err := json.Marshal(opts.oldValue) + if err != nil { + return false, errors.Wrapf(err, "failed to marshal value %v", opts.oldValue) + } + + downstreamOpts.OldValue = data + } + } + + if err := downstreamOpts.IsValid(); err != nil { + return false, err + } + + s.mux.Lock() + defer s.mux.Unlock() + + if s.elems == nil { + s.elems = make(map[string]kvElem) + } + + if !opts.Atomic { + if value == nil { + delete(s.elems, key) + } else { + s.elems[key] = kvElem{ + value: valueBytes, + expiresAt: expireTime(downstreamOpts.ExpireInSeconds), + } + } + + return true, nil + } + + oldElem := s.elems[key] + if !oldElem.isExpired() && !bytes.Equal(oldElem.value, downstreamOpts.OldValue) { + return false, nil + } + + if value == nil { + delete(s.elems, key) + } else { + s.elems[key] = kvElem{ + value: valueBytes, + expiresAt: expireTime(downstreamOpts.ExpireInSeconds), + } + } + + return true, nil +} + +func (s *MemoryStore) SetAtomicWithRetries(key string, valueFunc func(oldValue []byte) (newValue any, err error)) error { + if valueFunc == nil { + return errors.New("function must not be nil") + } + + for range numRetries { + var oldVal []byte + if err := s.Get(key, &oldVal); err != nil { + return errors.Wrapf(err, "failed to get value for key %s", key) + } + + newVal, err := valueFunc(oldVal) + if err != nil { + return errors.Wrap(err, "valueFunc failed") + } + + if saved, err := s.Set(key, newVal, SetAtomic(oldVal)); err != nil { + return 
errors.Wrapf(err, "DB failed to set value for key %s", key) + } else if saved { + return nil + } + + // small delay to allow cooperative scheduling to do its thing + time.Sleep(10 * time.Millisecond) + } + return errors.Errorf("failed to set value after %d retries", numRetries) +} + +func (s *MemoryStore) ListKeys(page int, count int, options ...ListKeysOption) ([]string, error) { + if page < 0 { + return nil, errors.New("page number must not be negative") + } + + if count < 0 { + return nil, errors.New("count must not be negative") + } + + if count == 0 { + return []string{}, nil + } + + opt := listKeysOptions{} + for _, o := range options { + if o != nil { + o(&opt) + } + } + + allKeys := make([]string, 0) + s.mux.RLock() + for k, e := range s.elems { + if e.isExpired() { + continue + } + allKeys = append(allKeys, k) + } + s.mux.RUnlock() + + if len(allKeys) == 0 { + return []string{}, nil + } + + slices.Sort(allKeys) + + pageKeys := paginateSlice(allKeys, page, count) + + if len(opt.checkers) == 0 { + return pageKeys, nil + } + + n := 0 + for _, k := range pageKeys { + keep := true + for _, c := range opt.checkers { + ok, err := c(k) + if err != nil { + return nil, err + } + if !ok { + keep = false + break + } + } + + if keep { + pageKeys[n] = k + n++ + } + } + + return pageKeys[:n], nil +} + +func (s *MemoryStore) Get(key string, o any) error { + s.mux.RLock() + e, ok := s.elems[key] + s.mux.RUnlock() + if !ok || len(e.value) == 0 || e.isExpired() { + return nil + } + + if bytesOut, ok := o.(*[]byte); ok { + *bytesOut = e.value + return nil + } + + if err := json.Unmarshal(e.value, o); err != nil { + return errors.Wrapf(err, "failed to unmarshal value for key %s", key) + } + + return nil +} + +func (s *MemoryStore) Delete(key string) error { + s.mux.Lock() + delete(s.elems, key) + s.mux.Unlock() + + return nil +} + +// DeleteAll removes all key-value pairs. 
+func (s *MemoryStore) DeleteAll() error { + s.mux.Lock() + s.elems = make(map[string]kvElem) + s.mux.Unlock() + + return nil +} + +func expireTime(expireInSeconds int64) *time.Time { + if expireInSeconds == 0 { + return nil + } + t := time.Now().Add(time.Second * time.Duration(expireInSeconds)) + return &t +} + +func paginateSlice[T any](list []T, page int, perPage int) []T { + i := page * perPage + j := (page + 1) * perPage + l := len(list) + if j > l { + j = l + } + if i > l { + i = l + } + return list[i:j] +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/license.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/license.go new file mode 100644 index 00000000..42b3c78b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/license.go @@ -0,0 +1,108 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/model" +) + +// IsEnterpriseLicensedOrDevelopment returns true when the server is licensed with any Mattermost +// Enterprise License, or has `EnableDeveloper` and `EnableTesting` configuration settings +// enabled signaling a non-production, developer mode. 
+func IsEnterpriseLicensedOrDevelopment(config *model.Config, license *model.License) bool { + if license != nil { + return true + } + + return IsConfiguredForDevelopment(config) +} + +// isValidSkuShortName returns whether the SKU short name is one of the known strings; +// namely: E10 or professional, or E20 or enterprise +func isValidSkuShortName(license *model.License) bool { + if license == nil { + return false + } + + switch license.SkuShortName { + case model.LicenseShortSkuE10, model.LicenseShortSkuE20, model.LicenseShortSkuProfessional, model.LicenseShortSkuEnterprise, model.LicenseShortSkuEnterpriseAdvanced: + return true + default: + return false + } +} + +// IsE10LicensedOrDevelopment returns true when the server is at least licensed with a legacy Mattermost +// Enterprise E10 License or a Mattermost Professional License, or has `EnableDeveloper` and +// `EnableTesting` configuration settings enabled, signaling a non-production, developer mode. +func IsE10LicensedOrDevelopment(config *model.Config, license *model.License) bool { + if model.MinimumProfessionalLicense(license) { + return true + } + + if !isValidSkuShortName(license) { + // As a fallback for licenses whose SKU short name is unknown, make a best effort to try + // and use the presence of a known E10/Professional feature as a check to determine licensing. + if license != nil && + license.Features != nil && + license.Features.LDAP != nil && + *license.Features.LDAP { + return true + } + } + + return IsConfiguredForDevelopment(config) +} + +// IsE20LicensedOrDevelopment returns true when the server is licensed with a legacy Mattermost +// Enterprise E20 License or a Mattermost Enterprise License, or has `EnableDeveloper` and +// `EnableTesting` configuration settings enabled, signaling a non-production, developer mode. 
+func IsE20LicensedOrDevelopment(config *model.Config, license *model.License) bool { + if model.MinimumEnterpriseLicense(license) { + return true + } + + if !isValidSkuShortName(license) { + // As a fallback for licenses whose SKU short name is unknown, make a best effort to try + // and use the presence of a known E20/Enterprise feature as a check to determine licensing. + if license != nil && + license.Features != nil && + license.Features.FutureFeatures != nil && + *license.Features.FutureFeatures { + return true + } + } + + return IsConfiguredForDevelopment(config) +} + +// IsEnterpriseAdvancedLicensedOrDevelopment returns true when the server is licensed with a Mattermost +// Enterprise Advanced License, or has `EnableDeveloper` and `EnableTesting` configuration settings +func IsEnterpriseAdvancedLicensedOrDevelopment(config *model.Config, license *model.License) bool { + if license != nil && license.SkuShortName == model.LicenseShortSkuEnterpriseAdvanced { + return true + } + + return IsConfiguredForDevelopment(config) +} + +// IsConfiguredForDevelopment returns true when the server has `EnableDeveloper` and `EnableTesting` +// configuration settings enabled, signaling a non-production, developer mode. +func IsConfiguredForDevelopment(config *model.Config) bool { + if config != nil && + config.ServiceSettings.EnableTesting != nil && + *config.ServiceSettings.EnableTesting && + config.ServiceSettings.EnableDeveloper != nil && + *config.ServiceSettings.EnableDeveloper { + return true + } + + return false +} + +// IsCloud returns true when the server is on cloud, and false otherwise. 
+func IsCloud(license *model.License) bool {
+	if license == nil || license.Features == nil || license.Features.Cloud == nil {
+		return false
+	}
+
+	return *license.Features.Cloud
+}
diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/log.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/log.go
new file mode 100644
index 00000000..9974021c
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/log.go
@@ -0,0 +1,33 @@
+package pluginapi
+
+import (
+	"github.com/mattermost/mattermost/server/public/plugin"
+)
+
+// LogService exposes methods to log to the Mattermost server log.
+//
+// Note that standard error is automatically sent to the Mattermost server log, and standard
+// output is redirected to standard error. This service enables optional structured logging.
+type LogService struct {
+	api plugin.API
+}
+
+// Error logs an error message, optionally structured with alternating key, value parameters.
+func (l *LogService) Error(message string, keyValuePairs ...any) {
+	l.api.LogError(message, keyValuePairs...)
+}
+
+// Warn logs a warning message, optionally structured with alternating key, value parameters.
+func (l *LogService) Warn(message string, keyValuePairs ...any) {
+	l.api.LogWarn(message, keyValuePairs...)
+}
+
+// Info logs an informational message, optionally structured with alternating key, value parameters.
+func (l *LogService) Info(message string, keyValuePairs ...any) {
+	l.api.LogInfo(message, keyValuePairs...)
+}
+
+// Debug logs a debug message, optionally structured with alternating key, value parameters.
+func (l *LogService) Debug(message string, keyValuePairs ...any) {
+	l.api.LogDebug(message, keyValuePairs...)
+} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/logrus.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/logrus.go new file mode 100644 index 00000000..e5e65b9a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/logrus.go @@ -0,0 +1,69 @@ +package pluginapi + +import ( + "fmt" + "io" + + "github.com/sirupsen/logrus" +) + +// LogrusHook is a logrus.Hook for emitting plugin logs through the RPC API for inclusion in the +// server logs. +// +// To configure the default Logrus logger for use with plugin logging, simply invoke: +// +// pluginapi.ConfigureLogrus(logrus.StandardLogger(), pluginAPIClient) +// +// Alternatively, construct your own logger to pass to pluginapi.ConfigureLogrus. +type LogrusHook struct { + log LogService +} + +// NewLogrusHook creates a new instance of LogrusHook. +func NewLogrusHook(log LogService) *LogrusHook { + return &LogrusHook{ + log: log, + } +} + +// Levels allows LogrusHook to process any log level. +func (lh *LogrusHook) Levels() []logrus.Level { + return logrus.AllLevels +} + +// Fire proxies logrus entries through the plugin API at the appropriate level. +func (lh *LogrusHook) Fire(entry *logrus.Entry) error { + fields := []any{} + for key, value := range entry.Data { + fields = append(fields, key, fmt.Sprintf("%+v", value)) + } + + if entry.Caller != nil { + fields = append(fields, "plugin_caller", fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line)) + } + + switch entry.Level { + case logrus.PanicLevel, logrus.FatalLevel, logrus.ErrorLevel: + lh.log.Error(entry.Message, fields...) + case logrus.WarnLevel: + lh.log.Warn(entry.Message, fields...) + case logrus.InfoLevel: + lh.log.Info(entry.Message, fields...) + case logrus.DebugLevel, logrus.TraceLevel: + lh.log.Debug(entry.Message, fields...) 
+	}
+
+	return nil
+}
+
+// ConfigureLogrus configures the given logrus logger with a hook to proxy through the RPC API,
+// discarding the default output to avoid duplicating the events across the standard STDOUT proxy.
+func ConfigureLogrus(logger *logrus.Logger, client *Client) {
+	hook := NewLogrusHook(client.Log)
+	logger.Hooks.Add(hook)
+	logger.SetOutput(io.Discard)
+	logrus.SetReportCaller(true)
+
+	// By default, log everything to the server, and let it decide what gets through.
+	logrus.SetLevel(logrus.TraceLevel)
+}
diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/oauth.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/oauth.go
new file mode 100644
index 00000000..ef3dd8ee
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/oauth.go
@@ -0,0 +1,55 @@
+package pluginapi
+
+import (
+	"github.com/mattermost/mattermost/server/public/model"
+	"github.com/mattermost/mattermost/server/public/plugin"
+)
+
+// OAuthService exposes methods to manipulate OAuth Apps.
+type OAuthService struct {
+	api plugin.API
+}
+
+// Create creates a new OAuth App.
+//
+// Minimum server version: 5.38
+func (o *OAuthService) Create(app *model.OAuthApp) error {
+	createdApp, appErr := o.api.CreateOAuthApp(app)
+	if appErr != nil {
+		return normalizeAppErr(appErr)
+	}
+
+	*app = *createdApp
+
+	return nil
+}
+
+// Get gets an existing OAuth App by id.
+//
+// Minimum server version: 5.38
+func (o *OAuthService) Get(appID string) (*model.OAuthApp, error) {
+	app, appErr := o.api.GetOAuthApp(appID)
+
+	return app, normalizeAppErr(appErr)
+}
+
+// Update updates an existing OAuth App.
+//
+// Minimum server version: 5.38
+func (o *OAuthService) Update(app *model.OAuthApp) error {
+	updatedApp, appErr := o.api.UpdateOAuthApp(app)
+	if appErr != nil {
+		return normalizeAppErr(appErr)
+	}
+
+	*app = *updatedApp
+
+	return nil
+}
+
+// Delete deletes an existing OAuth App by id.
+// +// Minimum server version: 5.38 +func (o *OAuthService) Delete(appID string) error { + return normalizeAppErr(o.api.DeleteOAuthApp(appID)) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/plugins.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/plugins.go new file mode 100644 index 00000000..babe7d2b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/plugins.go @@ -0,0 +1,114 @@ +package pluginapi + +import ( + "io" + "net/http" + "net/url" + "time" + + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// PluginService exposes methods to manipulate the set of plugins as well as communicate with +// other plugin instances. +type PluginService struct { + api plugin.API +} + +// List will return a list of plugin manifests for currently active plugins. +// +// Minimum server version: 5.6 +func (p *PluginService) List() ([]*model.Manifest, error) { + manifests, appErr := p.api.GetPlugins() + + return manifests, normalizeAppErr(appErr) +} + +// Install will upload another plugin with tar.gz file. +// Previous version will be replaced on replace true. +// +// Minimum server version: 5.18 +func (p *PluginService) Install(file io.Reader, replace bool) (*model.Manifest, error) { + manifest, appErr := p.api.InstallPlugin(file, replace) + + return manifest, normalizeAppErr(appErr) +} + +// InstallPluginFromURL installs the plugin from the provided url. 
+// +// Minimum server version: 5.18 +func (p *PluginService) InstallPluginFromURL(downloadURL string, replace bool) (*model.Manifest, error) { + err := ensureServerVersion(p.api, "5.18.0") + if err != nil { + return nil, err + } + + parsedURL, err := url.Parse(downloadURL) + if err != nil { + return nil, errors.Wrap(err, "error while parsing url") + } + + client := &http.Client{Timeout: time.Hour} + response, err := client.Get(parsedURL.String()) + if err != nil { + return nil, errors.Wrap(err, "unable to download the plugin") + } + defer response.Body.Close() + + if response.StatusCode != http.StatusOK { + return nil, errors.Errorf("received %d status code while downloading plugin from server", response.StatusCode) + } + + manifest, err := p.Install(response.Body, replace) + if err != nil { + return nil, errors.Wrap(err, "unable to install plugin on server") + } + + return manifest, nil +} + +// Enable will enable an plugin installed. +// +// Minimum server version: 5.6 +func (p *PluginService) Enable(id string) error { + appErr := p.api.EnablePlugin(id) + + return normalizeAppErr(appErr) +} + +// Disable will disable an enabled plugin. +// +// Minimum server version: 5.6 +func (p *PluginService) Disable(id string) error { + appErr := p.api.DisablePlugin(id) + + return normalizeAppErr(appErr) +} + +// Remove will disable and delete a plugin. +// +// Minimum server version: 5.6 +func (p *PluginService) Remove(id string) error { + appErr := p.api.RemovePlugin(id) + + return normalizeAppErr(appErr) +} + +// GetPluginStatus will return the status of a plugin. +// +// Minimum server version: 5.6 +func (p *PluginService) GetPluginStatus(id string) (*model.PluginStatus, error) { + pluginStatus, appErr := p.api.GetPluginStatus(id) + + return pluginStatus, normalizeAppErr(appErr) +} + +// HTTP allows inter-plugin requests to plugin APIs. 
+// +// Minimum server version: 5.18 +func (p *PluginService) HTTP(request *http.Request) *http.Response { + return p.api.PluginHTTP(request) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/post.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/post.go new file mode 100644 index 00000000..24ed2e12 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/post.go @@ -0,0 +1,337 @@ +package pluginapi + +import ( + "slices" + + "github.com/pkg/errors" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// PostService exposes methods to manipulate posts. +type PostService struct { + api plugin.API +} + +// CreatePost creates a post. +// +// Minimum server version: 5.2 +func (p *PostService) CreatePost(post *model.Post) error { + createdPost, appErr := p.api.CreatePost(post) + if appErr != nil { + return normalizeAppErr(appErr) + } + + err := createdPost.ShallowCopy(post) + if err != nil { + return err + } + + return nil +} + +// DM sends a post as a direct message +// +// Minimum server version: 5.2 +func (p *PostService) DM(senderUserID, receiverUserID string, post *model.Post) error { + channel, appErr := p.api.GetDirectChannel(senderUserID, receiverUserID) + if appErr != nil { + return normalizeAppErr(appErr) + } + post.ChannelId = channel.Id + post.UserId = senderUserID + return p.CreatePost(post) +} + +// GetPost gets a post. +// +// Minimum server version: 5.2 +func (p *PostService) GetPost(postID string) (*model.Post, error) { + post, appErr := p.api.GetPost(postID) + + return post, normalizeAppErr(appErr) +} + +// UpdatePost updates a post. 
+// +// Minimum server version: 5.2 +func (p *PostService) UpdatePost(post *model.Post) error { + updatedPost, appErr := p.api.UpdatePost(post) + if appErr != nil { + return normalizeAppErr(appErr) + } + + err := updatedPost.ShallowCopy(post) + if err != nil { + return err + } + + return nil +} + +// DeletePost deletes a post. +// +// Minimum server version: 5.2 +func (p *PostService) DeletePost(postID string) error { + return normalizeAppErr(p.api.DeletePost(postID)) +} + +// SendEphemeralPost creates an ephemeral post. +// +// Minimum server version: 5.2 +func (p *PostService) SendEphemeralPost(userID string, post *model.Post) { + *post = *p.api.SendEphemeralPost(userID, post) +} + +// UpdateEphemeralPost updates an ephemeral message previously sent to the user. +// EXPERIMENTAL: This API is experimental and can be changed without advance notice. +// +// Minimum server version: 5.2 +func (p *PostService) UpdateEphemeralPost(userID string, post *model.Post) { + *post = *p.api.UpdateEphemeralPost(userID, post) +} + +// DeleteEphemeralPost deletes an ephemeral message previously sent to the user. +// EXPERIMENTAL: This API is experimental and can be changed without advance notice. +// +// Minimum server version: 5.2 +func (p *PostService) DeleteEphemeralPost(userID, postID string) { + p.api.DeleteEphemeralPost(userID, postID) +} + +// GetPostThread gets a post with all the other posts in the same thread. +// +// Minimum server version: 5.6 +func (p *PostService) GetPostThread(postID string) (*model.PostList, error) { + postList, appErr := p.api.GetPostThread(postID) + + return postList, normalizeAppErr(appErr) +} + +// GetPostsSince gets posts created after a specified time as Unix time in milliseconds. 
+// +// Minimum server version: 5.6 +func (p *PostService) GetPostsSince(channelID string, time int64) (*model.PostList, error) { + postList, appErr := p.api.GetPostsSince(channelID, time) + + return postList, normalizeAppErr(appErr) +} + +// GetPostsAfter gets a page of posts that were posted after the post provided. +// +// Minimum server version: 5.6 +func (p *PostService) GetPostsAfter(channelID, postID string, page, perPage int) (*model.PostList, error) { + postList, appErr := p.api.GetPostsAfter(channelID, postID, page, perPage) + + return postList, normalizeAppErr(appErr) +} + +// GetPostsBefore gets a page of posts that were posted before the post provided. +// +// Minimum server version: 5.6 +func (p *PostService) GetPostsBefore(channelID, postID string, page, perPage int) (*model.PostList, error) { + postList, appErr := p.api.GetPostsBefore(channelID, postID, page, perPage) + + return postList, normalizeAppErr(appErr) +} + +// GetPostsForChannel gets a list of posts for a channel. +// +// Minimum server version: 5.6 +func (p *PostService) GetPostsForChannel(channelID string, page, perPage int) (*model.PostList, error) { + postList, appErr := p.api.GetPostsForChannel(channelID, page, perPage) + + return postList, normalizeAppErr(appErr) +} + +// SearchPostsInTeam returns a list of posts in a specific team that match the given params. +// +// Minimum server version: 5.10 +func (p *PostService) SearchPostsInTeam(teamID string, paramsList []*model.SearchParams) ([]*model.Post, error) { + postList, appErr := p.api.SearchPostsInTeam(teamID, paramsList) + + return postList, normalizeAppErr(appErr) +} + +// AddReaction add a reaction to a post. +// +// Minimum server version: 5.3 +func (p *PostService) AddReaction(reaction *model.Reaction) error { + addedReaction, appErr := p.api.AddReaction(reaction) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *reaction = *addedReaction + + return nil +} + +// GetReactions get the reactions of a post. 
+// +// Minimum server version: 5.3 +func (p *PostService) GetReactions(postID string) ([]*model.Reaction, error) { + reactions, appErr := p.api.GetReactions(postID) + + return reactions, normalizeAppErr(appErr) +} + +// RemoveReaction remove a reaction from a post. +// +// Minimum server version: 5.3 +func (p *PostService) RemoveReaction(reaction *model.Reaction) error { + return normalizeAppErr(p.api.RemoveReaction(reaction)) +} + +type ShouldProcessMessageOption func(*shouldProcessMessageOptions) + +type shouldProcessMessageOptions struct { + AllowSystemMessages bool + AllowBots bool + AllowWebhook bool + FilterChannelIDs []string + FilterUserIDs []string + OnlyBotDMs bool + BotID string +} + +// AllowSystemMessages configures a call to ShouldProcessMessage to return true for system messages. +// +// As it is typically desirable only to consume messages from users of the system, ShouldProcessMessage ignores system messages by default. +func AllowSystemMessages() ShouldProcessMessageOption { + return func(options *shouldProcessMessageOptions) { + options.AllowSystemMessages = true + } +} + +// AllowBots configures a call to ShouldProcessMessage to return true for bot posts. +// +// As it is typically desirable only to consume messages from human users of the system, ShouldProcessMessage ignores bot messages by default. +// When allowing bots, take care to avoid a loop where two plugins respond to each others posts repeatedly. +func AllowBots() ShouldProcessMessageOption { + return func(options *shouldProcessMessageOptions) { + options.AllowBots = true + } +} + +// AllowWebhook configures a call to ShouldProcessMessage to return true for posts from webhook. +// +// As it is typically desirable only to consume messages from human users of the system, ShouldProcessMessage ignores webhook messages by default. 
+func AllowWebhook() ShouldProcessMessageOption { + return func(options *shouldProcessMessageOptions) { + options.AllowWebhook = true + } +} + +// FilterChannelIDs configures a call to ShouldProcessMessage to return true only for the given channels. +// +// By default, posts from all channels are allowed to be processed. +func FilterChannelIDs(filterChannelIDs []string) ShouldProcessMessageOption { + return func(options *shouldProcessMessageOptions) { + options.FilterChannelIDs = filterChannelIDs + } +} + +// FilterUserIDs configures a call to ShouldProcessMessage to return true only for the given users. +// +// By default, posts from all non-bot users are allowed. +func FilterUserIDs(filterUserIDs []string) ShouldProcessMessageOption { + return func(options *shouldProcessMessageOptions) { + options.FilterUserIDs = filterUserIDs + } +} + +// OnlyBotDMs configures a call to ShouldProcessMessage to return true only for direct messages sent to the bot created by EnsureBot. +// +// By default, posts from all channels are allowed. +func OnlyBotDMs() ShouldProcessMessageOption { + return func(options *shouldProcessMessageOptions) { + options.OnlyBotDMs = true + } +} + +// If provided, BotID configures ShouldProcessMessage to skip its retrieval from the store. +// +// By default, posts from all non-bot users are allowed. +func BotID(botID string) ShouldProcessMessageOption { + return func(options *shouldProcessMessageOptions) { + options.BotID = botID + } +} + +// ShouldProcessMessage returns if the message should be processed by a message hook. +// +// Use this method to avoid processing unnecessary messages in a MessageHasBeenPosted +// or MessageWillBePosted hook, and indeed in some cases avoid an infinite loop between +// two automated bots or plugins. +// +// The behavior is customizable using the given options, since plugin needs may vary. +// By default, system messages and messages from bots will be skipped. 
+// +// Minimum server version: 5.2 +func (p *PostService) ShouldProcessMessage(post *model.Post, options ...ShouldProcessMessageOption) (bool, error) { + messageProcessOptions := &shouldProcessMessageOptions{} + for _, option := range options { + option(messageProcessOptions) + } + + var botIDBytes []byte + var kvGetErr *model.AppError + + if messageProcessOptions.BotID != "" { + botIDBytes = []byte(messageProcessOptions.BotID) + } else { + botIDBytes, kvGetErr = p.api.KVGet(botUserKey) + + if kvGetErr != nil { + return false, errors.Wrap(kvGetErr, "failed to get bot") + } + } + + if botIDBytes != nil { + if post.UserId == string(botIDBytes) { + return false, nil + } + } + + if post.IsSystemMessage() && !messageProcessOptions.AllowSystemMessages { + return false, nil + } + + if !messageProcessOptions.AllowWebhook && post.GetProp(model.PostPropsFromWebhook) == "true" { + return false, nil + } + + if !messageProcessOptions.AllowBots { + user, appErr := p.api.GetUser(post.UserId) + if appErr != nil { + return false, errors.Wrap(appErr, "unable to get user") + } + + if user.IsBot { + return false, nil + } + } + + if len(messageProcessOptions.FilterChannelIDs) != 0 && !slices.Contains(messageProcessOptions.FilterChannelIDs, post.ChannelId) { + return false, nil + } + + if len(messageProcessOptions.FilterUserIDs) != 0 && !slices.Contains(messageProcessOptions.FilterUserIDs, post.UserId) { + return false, nil + } + + if botIDBytes != nil && messageProcessOptions.OnlyBotDMs { + channel, appErr := p.api.GetChannel(post.ChannelId) + if appErr != nil { + return false, errors.Wrap(appErr, "unable to get channel") + } + + if !model.IsBotDMChannel(channel, string(botIDBytes)) { + return false, nil + } + } + + return true, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/property.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/property.go new file mode 100644 index 00000000..79175b64 --- /dev/null +++ 
b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/property.go @@ -0,0 +1,172 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// PropertyService exposes methods to manipulate property fields and values. +type PropertyService struct { + api plugin.API +} + +// CreatePropertyField creates a new property field. +// +// Minimum server version: 10.10 +func (p *PropertyService) CreatePropertyField(field *model.PropertyField) (*model.PropertyField, error) { + return p.api.CreatePropertyField(field) +} + +// GetPropertyField gets a property field by groupID and fieldID. +// +// Minimum server version: 10.10 +func (p *PropertyService) GetPropertyField(groupID, fieldID string) (*model.PropertyField, error) { + return p.api.GetPropertyField(groupID, fieldID) +} + +// GetPropertyFields gets multiple property fields by groupID and a list of IDs. +// +// Minimum server version: 10.10 +func (p *PropertyService) GetPropertyFields(groupID string, ids []string) ([]*model.PropertyField, error) { + return p.api.GetPropertyFields(groupID, ids) +} + +// UpdatePropertyField updates an existing property field. +// +// Minimum server version: 10.10 +func (p *PropertyService) UpdatePropertyField(groupID string, field *model.PropertyField) (*model.PropertyField, error) { + return p.api.UpdatePropertyField(groupID, field) +} + +// DeletePropertyField deletes a property field (soft delete). +// +// Minimum server version: 10.10 +func (p *PropertyService) DeletePropertyField(groupID, fieldID string) error { + return p.api.DeletePropertyField(groupID, fieldID) +} + +// SearchPropertyFields searches for property fields with filtering options. 
+// +// Minimum server version: 11.0 +func (p *PropertyService) SearchPropertyFields(groupID string, opts model.PropertyFieldSearchOpts) ([]*model.PropertyField, error) { + return p.api.SearchPropertyFields(groupID, opts) +} + +// CountPropertyFields counts property fields for a group. +// +// Minimum server version: 11.0 +func (p *PropertyService) CountPropertyFields(groupID string, includeDeleted bool) (int64, error) { + return p.api.CountPropertyFields(groupID, includeDeleted) +} + +// CountPropertyFieldsForTarget counts property fields for a specific target. +// +// Minimum server version: 11.0 +func (p *PropertyService) CountPropertyFieldsForTarget(groupID, targetType, targetID string, includeDeleted bool) (int64, error) { + return p.api.CountPropertyFieldsForTarget(groupID, targetType, targetID, includeDeleted) +} + +// CreatePropertyValue creates a new property value. +// +// Minimum server version: 10.10 +func (p *PropertyService) CreatePropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) { + return p.api.CreatePropertyValue(value) +} + +// GetPropertyValue gets a property value by groupID and valueID. +// +// Minimum server version: 10.10 +func (p *PropertyService) GetPropertyValue(groupID, valueID string) (*model.PropertyValue, error) { + return p.api.GetPropertyValue(groupID, valueID) +} + +// GetPropertyValues gets multiple property values by groupID and a list of IDs. +// +// Minimum server version: 10.10 +func (p *PropertyService) GetPropertyValues(groupID string, ids []string) ([]*model.PropertyValue, error) { + return p.api.GetPropertyValues(groupID, ids) +} + +// UpdatePropertyValue updates an existing property value. +// +// Minimum server version: 10.10 +func (p *PropertyService) UpdatePropertyValue(groupID string, value *model.PropertyValue) (*model.PropertyValue, error) { + return p.api.UpdatePropertyValue(groupID, value) +} + +// UpsertPropertyValue creates a new property value or updates it if it already exists. 
+// +// Minimum server version: 10.10 +func (p *PropertyService) UpsertPropertyValue(value *model.PropertyValue) (*model.PropertyValue, error) { + return p.api.UpsertPropertyValue(value) +} + +// DeletePropertyValue deletes a property value (soft delete). +// +// Minimum server version: 10.10 +func (p *PropertyService) DeletePropertyValue(groupID, valueID string) error { + return p.api.DeletePropertyValue(groupID, valueID) +} + +// SearchPropertyValues searches for property values with filtering options. +// +// Minimum server version: 11.0 +func (p *PropertyService) SearchPropertyValues(groupID string, opts model.PropertyValueSearchOpts) ([]*model.PropertyValue, error) { + return p.api.SearchPropertyValues(groupID, opts) +} + +// RegisterPropertyGroup registers a new property group. +// +// Minimum server version: 10.10 +func (p *PropertyService) RegisterPropertyGroup(name string) (*model.PropertyGroup, error) { + return p.api.RegisterPropertyGroup(name) +} + +// GetPropertyGroup gets a property group by name. +// +// Minimum server version: 10.10 +func (p *PropertyService) GetPropertyGroup(name string) (*model.PropertyGroup, error) { + return p.api.GetPropertyGroup(name) +} + +// GetPropertyFieldByName gets a property field by groupID, targetID and name. +// +// Minimum server version: 10.10 +func (p *PropertyService) GetPropertyFieldByName(groupID, targetID, name string) (*model.PropertyField, error) { + return p.api.GetPropertyFieldByName(groupID, targetID, name) +} + +// UpdatePropertyFields updates multiple property fields in a single operation. +// +// Minimum server version: 10.10 +func (p *PropertyService) UpdatePropertyFields(groupID string, fields []*model.PropertyField) ([]*model.PropertyField, error) { + return p.api.UpdatePropertyFields(groupID, fields) +} + +// UpdatePropertyValues updates multiple property values in a single operation. 
+//
+// Minimum server version: 10.10
+func (p *PropertyService) UpdatePropertyValues(groupID string, values []*model.PropertyValue) ([]*model.PropertyValue, error) {
+	return p.api.UpdatePropertyValues(groupID, values)
+}
+
+// UpsertPropertyValues creates or updates multiple property values in a single operation.
+//
+// Minimum server version: 10.10
+func (p *PropertyService) UpsertPropertyValues(values []*model.PropertyValue) ([]*model.PropertyValue, error) {
+	return p.api.UpsertPropertyValues(values)
+}
+
+// DeletePropertyValuesForTarget deletes all property values for a specific target.
+//
+// Minimum server version: 10.10
+func (p *PropertyService) DeletePropertyValuesForTarget(groupID, targetType, targetID string) error {
+	return p.api.DeletePropertyValuesForTarget(groupID, targetType, targetID)
+}
+
+// DeletePropertyValuesForField deletes all property values for a specific field.
+//
+// Minimum server version: 10.10
+func (p *PropertyService) DeletePropertyValuesForField(groupID, fieldID string) error {
+	return p.api.DeletePropertyValuesForField(groupID, fieldID)
+}
diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/session.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/session.go
new file mode 100644
index 00000000..05c11408
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/session.go
@@ -0,0 +1,43 @@
+package pluginapi
+
+import (
+	"github.com/mattermost/mattermost/server/public/model"
+	"github.com/mattermost/mattermost/server/public/plugin"
+)
+
+// SessionService exposes methods to manipulate user sessions.
+type SessionService struct {
+	api plugin.API
+}
+
+// Get returns the session object for the Session ID
+//
+// Minimum server version: 5.2
+func (s *SessionService) Get(id string) (*model.Session, error) {
+	session, appErr := s.api.GetSession(id)
+
+	return session, normalizeAppErr(appErr)
+}
+
+// Create creates a new user session.
+// +// Minimum server version: 6.2 +func (s *SessionService) Create(session *model.Session) (*model.Session, error) { + session, appErr := s.api.CreateSession(session) + + return session, normalizeAppErr(appErr) +} + +// ExtendSessionExpiry extends the duration of an existing session. +// +// Minimum server version: 6.2 +func (s *SessionService) ExtendExpiry(sessionID string, newExpiry int64) error { + return normalizeAppErr(s.api.ExtendSessionExpiry(sessionID, newExpiry)) +} + +// RevokeSession revokes an existing user session. +// +// Minimum server version: 6.2 +func (s *SessionService) Revoke(sessionID string) error { + return normalizeAppErr(s.api.RevokeSession(sessionID)) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/slashcommand.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/slashcommand.go new file mode 100644 index 00000000..7fe939e5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/slashcommand.go @@ -0,0 +1,100 @@ +package pluginapi + +import ( + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// SlashCommandService exposes methods to manipulate slash commands. +type SlashCommandService struct { + api plugin.API +} + +// Register registers a custom slash command. When the command is triggered, your plugin +// can fulfill it via the ExecuteCommand hook. +// +// Minimum server version: 5.2 +func (c *SlashCommandService) Register(command *model.Command) error { + return c.api.RegisterCommand(command) +} + +// Unregister unregisters a command previously registered via Register. +// +// Minimum server version: 5.2 +func (c *SlashCommandService) Unregister(teamID, trigger string) error { + return c.api.UnregisterCommand(teamID, trigger) +} + +// Execute executes a slash command. 
+//
+// Minimum server version: 5.26
+func (c *SlashCommandService) Execute(command *model.CommandArgs) (*model.CommandResponse, error) {
+	return c.api.ExecuteSlashCommand(command)
+}
+
+// Create creates a server-owned slash command that is not handled by the plugin
+// itself, and which will persist past the life of the plugin. The command will have its
+// CreatorId set to "" and its PluginId set to the id of the plugin that created it.
+//
+// Minimum server version: 5.28
+func (c *SlashCommandService) Create(command *model.Command) (*model.Command, error) {
+	return c.api.CreateCommand(command)
+}
+
+// List returns the list of all slash commands for teamID. E.g., custom commands
+// (those created through the integrations menu, the REST api, or the plugin api CreateCommand),
+// plugin commands (those created with plugin api RegisterCommand), and builtin commands
+// (those added internally through RegisterCommandProvider).
+//
+// Minimum server version: 5.28
+func (c *SlashCommandService) List(teamID string) ([]*model.Command, error) {
+	return c.api.ListCommands(teamID)
+}
+
+// ListCustom returns the list of slash commands for teamID that were created
+// through the integrations menu, the REST api, or the plugin api CreateCommand.
+//
+// Minimum server version: 5.28
+func (c *SlashCommandService) ListCustom(teamID string) ([]*model.Command, error) {
+	return c.api.ListCustomCommands(teamID)
+}
+
+// ListPlugin returns the list of slash commands for teamID that were created
+// with the plugin api RegisterCommand.
+//
+// Minimum server version: 5.28
+func (c *SlashCommandService) ListPlugin(teamID string) ([]*model.Command, error) {
+	return c.api.ListPluginCommands(teamID)
+}
+
+// ListBuiltIn returns the list of slash commands that are builtin commands
+// (those added internally through RegisterCommandProvider).
+// +// Minimum server version: 5.28 +func (c *SlashCommandService) ListBuiltIn() ([]*model.Command, error) { + return c.api.ListBuiltInCommands() +} + +// Get returns the command definition based on a command id string. +// +// Minimum server version: 5.28 +func (c *SlashCommandService) Get(commandID string) (*model.Command, error) { + return c.api.GetCommand(commandID) +} + +// Update updates a single command (identified by commandID) with the information provided in the +// updatedCmd model.Command struct. The following fields in the command cannot be updated: +// Id, Token, CreateAt, DeleteAt, and PluginId. If updatedCmd.TeamId is blank, it +// will be set to commandID's TeamId. +// +// Minimum server version: 5.28 +func (c *SlashCommandService) Update(commandID string, updatedCmd *model.Command) (*model.Command, error) { + return c.api.UpdateCommand(commandID, updatedCmd) +} + +// Delete deletes a slash command (identified by commandID). +// +// Minimum server version: 5.28 +func (c *SlashCommandService) Delete(commandID string) error { + return c.api.DeleteCommand(commandID) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/store.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/store.go new file mode 100644 index 00000000..59e5403e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/store.go @@ -0,0 +1,127 @@ +package pluginapi + +import ( + "database/sql" + "sync" + + // import sql drivers + _ "github.com/lib/pq" + + "github.com/mattermost/mattermost/server/public/plugin" + "github.com/mattermost/mattermost/server/public/shared/driver" + "github.com/pkg/errors" +) + +// StoreService exposes the underlying database. +type StoreService struct { + initializedMaster bool + initializedReplica bool + api plugin.API + driver plugin.Driver + mutex sync.Mutex + + masterDB *sql.DB + replicaDB *sql.DB +} + +// GetMasterDB gets the master database handle. 
+// +// Minimum server version: 5.16 +func (s *StoreService) GetMasterDB() (*sql.DB, error) { + s.mutex.Lock() + defer s.mutex.Unlock() + + if err := s.initializeMaster(); err != nil { + return nil, err + } + + return s.masterDB, nil +} + +// GetReplicaDB gets the replica database handle. +// Returns masterDB if a replica is not configured. +// +// Minimum server version: 5.16 +func (s *StoreService) GetReplicaDB() (*sql.DB, error) { + s.mutex.Lock() + defer s.mutex.Unlock() + + if err := s.initializeReplica(); err != nil { + return nil, err + } + + if s.replicaDB != nil { + return s.replicaDB, nil + } + + if err := s.initializeMaster(); err != nil { + return nil, err + } + + return s.masterDB, nil +} + +// Close closes any open resources. This method is idempotent. +func (s *StoreService) Close() error { + s.mutex.Lock() + defer s.mutex.Unlock() + + if s.replicaDB != nil { + if err := s.replicaDB.Close(); err != nil { + return err + } + } + + if s.masterDB != nil { + if err := s.masterDB.Close(); err != nil { + return err + } + } + + return nil +} + +// DriverName returns the driver name for the datasource. 
+func (s *StoreService) DriverName() string { + return *s.api.GetConfig().SqlSettings.DriverName +} + +func (s *StoreService) initializeMaster() error { + if s.initializedMaster { + return nil + } + + if s.driver == nil { + return errors.New("no db driver was provided") + } + + // Set up master db + db := sql.OpenDB(driver.NewConnector(s.driver, true /* IsMaster */)) + if err := db.Ping(); err != nil { + return errors.Wrap(err, "failed to connect to master db") + } + s.masterDB = db + + s.initializedMaster = true + + return nil +} + +func (s *StoreService) initializeReplica() error { + if s.initializedReplica { + return nil + } + + config := s.api.GetUnsanitizedConfig() + // Set up replica db + if len(config.SqlSettings.DataSourceReplicas) > 0 { + db := sql.OpenDB(driver.NewConnector(s.driver, false /* IsMaster */)) + if err := db.Ping(); err != nil { + return errors.Wrap(err, "failed to connect to replica db") + } + s.replicaDB = db + } + + s.initializedReplica = true + return nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/system.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/system.go new file mode 100644 index 00000000..2e78d4a3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/system.go @@ -0,0 +1,182 @@ +package pluginapi + +import ( + "net/url" + "os" + "path" + filePath "path" + "time" + + "github.com/blang/semver/v4" + "github.com/pkg/errors" + "gopkg.in/yaml.v3" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// SystemService exposes methods to query system properties. +type SystemService struct { + api plugin.API +} + +// GetManifest returns the manifest from the plugin bundle. 
+// +// Minimum server version: 5.10 +func (s *SystemService) GetManifest() (*model.Manifest, error) { + p, err := s.api.GetBundlePath() + if err != nil { + return nil, err + } + + m, _, err := model.FindManifest(p) + if err != nil { + return nil, errors.Wrap(err, "failed to find and open manifest") + } + + return m, nil +} + +// GetBundlePath returns the absolute path where the plugin's bundle was unpacked. +// +// Minimum server version: 5.10 +func (s *SystemService) GetBundlePath() (string, error) { + return s.api.GetBundlePath() +} + +// GetPluginAssetURL builds a URL to the given asset in the assets directory. +// Use this URL to link to assets from the webapp, or for third-party integrations with your plugin. +// +// Minimum server version: 5.2 +func (s *SystemService) GetPluginAssetURL(pluginID, asset string) (string, error) { + if pluginID == "" { + return "", errors.New("empty pluginID provided") + } + + if asset == "" { + return "", errors.New("empty asset name provided") + } + + siteURL := *s.api.GetConfig().ServiceSettings.SiteURL + if siteURL == "" { + return "", errors.New("no SiteURL configured by the server") + } + + u, err := url.Parse(siteURL + path.Join("/", pluginID, asset)) + if err != nil { + return "", err + } + + return u.String(), nil +} + +// GetLicense returns the current license used by the Mattermost server. Returns nil if the +// the server does not have a license. +// +// Minimum server version: 5.10 +func (s *SystemService) GetLicense() *model.License { + return s.api.GetLicense() +} + +// GetServerVersion return the current Mattermost server version +// +// Minimum server version: 5.4 +func (s *SystemService) GetServerVersion() string { + return s.api.GetServerVersion() +} + +// IsEnterpriseReady returns true if the Mattermost server is configured as Enterprise Ready. 
+// +// Minimum server version: 6.1 +func (s *SystemService) IsEnterpriseReady() bool { + return s.api.IsEnterpriseReady() +} + +// GetSystemInstallDate returns the time that Mattermost was first installed and ran. +// +// Minimum server version: 5.10 +func (s *SystemService) GetSystemInstallDate() (time.Time, error) { + installDateMS, appErr := s.api.GetSystemInstallDate() + installDate := time.Unix(0, installDateMS*int64(time.Millisecond)) + + return installDate, normalizeAppErr(appErr) +} + +// GetDiagnosticID returns a unique identifier used by the server for diagnostic reports. +// +// Minimum server version: 5.10 +// +// Deprecated: Use GetTelemetryID instead. It returns the same value. +func (s *SystemService) GetDiagnosticID() string { + // TODO: Consider deprecating/rewriting in favor of just using GetUnsanitizedConfig(). + return s.api.GetDiagnosticId() +} + +// GetTelemetryID returns a unique identifier used by the server for telemetry reports. +// +// Minimum server version: 5.10 +func (s *SystemService) GetTelemetryID() string { + return s.api.GetTelemetryId() +} + +// RequestTrialLicense requests a trial license and installs it in the server. +// If the server version is lower than 5.36.0, an error is returned. +// +// Minimum server version: 5.36 +func (s *SystemService) RequestTrialLicense(requesterID string, users int, termsAccepted, receiveEmailsAccepted bool) error { + currentVersion := semver.MustParse(s.api.GetServerVersion()) + requiredVersion := semver.MustParse("5.36.0") + + if currentVersion.LT(requiredVersion) { + return errors.Errorf("current server version is lower than 5.36") + } + + err := s.api.RequestTrialLicense(requesterID, users, termsAccepted, receiveEmailsAccepted) + return normalizeAppErr(err) +} + +// GeneratePacketMetadata generates metadata for Customer Packets, encods it to YAML and saves it to a file +// defined by the path parameter. 
+// pluginMeta should contain the values that plugin wants to insert into the standard metadata. +// +// The plugin_id and plugin_version will be used from the manifest. +// If pluginMeta contains plugin_id or plugin_version, they will be overridden. +// +// It returns the path to the file where the metadata was saved. +// +// @tag Metadata +// Minimum server version: 5.10 +func (s *SystemService) GeneratePacketMetadata(path string, pluginMeta map[string]any) (string, error) { + manifest, err := s.GetManifest() + if err != nil { + return "", errors.Wrap(err, "failed to get manifest") + } + license := s.GetLicense() + serverID := s.GetTelemetryID() + + if pluginMeta == nil { + pluginMeta = make(map[string]any) + } + + // we override the plugin_id and version fields from the manifest + pluginMeta["plugin_id"] = manifest.Id + pluginMeta["plugin_version"] = manifest.Version + + md, err := model.GeneratePacketMetadata(model.PluginPacketType, serverID, license, pluginMeta) + if err != nil { + return "", errors.Wrap(err, "failed to get packet metadata") + } + filePath := filePath.Join(path, model.PacketMetadataFileName) + f, err := os.Create(filePath) + if err != nil { + return "", errors.Wrap(err, "failed to create packet metadata file") + } + defer f.Close() + + err = yaml.NewEncoder(f).Encode(md) + if err != nil { + return "", errors.Wrap(err, "failed to create packet metadata file") + } + + return filePath, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/team.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/team.go new file mode 100644 index 00000000..b3fb9f05 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/team.go @@ -0,0 +1,231 @@ +package pluginapi + +import ( + "bytes" + "io" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// TeamService exposes methods to manipulate teams and their members. 
+type TeamService struct { + api plugin.API +} + +// Get gets a team. +// +// Minimum server version: 5.2 +func (t *TeamService) Get(teamID string) (*model.Team, error) { + team, appErr := t.api.GetTeam(teamID) + + return team, normalizeAppErr(appErr) +} + +// GetByName gets a team by its name. +// +// Minimum server version: 5.2 +func (t *TeamService) GetByName(name string) (*model.Team, error) { + team, appErr := t.api.GetTeamByName(name) + + return team, normalizeAppErr(appErr) +} + +// TeamListOption is used to filter team listing. +type TeamListOption func(*ListTeamsOptions) + +// ListTeamsOptions holds options about filter out team listing. +type ListTeamsOptions struct { + UserID string +} + +// FilterTeamsByUser option is used to filter teams by user. +func FilterTeamsByUser(userID string) TeamListOption { + return func(o *ListTeamsOptions) { + o.UserID = userID + } +} + +// List gets a list of teams by options. +// +// Minimum server version: 5.2 +// Minimum server version when LimitTeamsToUser() option is used: 5.6 +func (t *TeamService) List(options ...TeamListOption) ([]*model.Team, error) { + opts := ListTeamsOptions{} + for _, o := range options { + o(&opts) + } + + var teams []*model.Team + var appErr *model.AppError + if opts.UserID != "" { + teams, appErr = t.api.GetTeamsForUser(opts.UserID) + } else { + teams, appErr = t.api.GetTeams() + } + + return teams, normalizeAppErr(appErr) +} + +// Search search a team. +// +// Minimum server version: 5.8 +func (t *TeamService) Search(term string) ([]*model.Team, error) { + teams, appErr := t.api.SearchTeams(term) + + return teams, normalizeAppErr(appErr) +} + +// Create creates a team. +// +// Minimum server version: 5.2 +func (t *TeamService) Create(team *model.Team) error { + createdTeam, appErr := t.api.CreateTeam(team) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *team = *createdTeam + + return nil +} + +// Update updates a team. 
+// +// Minimum server version: 5.2 +func (t *TeamService) Update(team *model.Team) error { + updatedTeam, appErr := t.api.UpdateTeam(team) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *team = *updatedTeam + + return nil +} + +// Delete deletes a team. +// +// Minimum server version: 5.2 +func (t *TeamService) Delete(teamID string) error { + return normalizeAppErr(t.api.DeleteTeam(teamID)) +} + +// GetIcon gets the team icon. +// +// Minimum server version: 5.6 +func (t *TeamService) GetIcon(teamID string) (io.Reader, error) { + contentBytes, appErr := t.api.GetTeamIcon(teamID) + if appErr != nil { + return nil, normalizeAppErr(appErr) + } + + return bytes.NewReader(contentBytes), nil +} + +// SetIcon sets the team icon. +// +// Minimum server version: 5.6 +func (t *TeamService) SetIcon(teamID string, content io.Reader) error { + contentBytes, err := io.ReadAll(content) + if err != nil { + return err + } + + return normalizeAppErr(t.api.SetTeamIcon(teamID, contentBytes)) +} + +// DeleteIcon removes the team icon. +// +// Minimum server version: 5.6 +func (t *TeamService) DeleteIcon(teamID string) error { + return normalizeAppErr(t.api.RemoveTeamIcon(teamID)) +} + +// GetUsers lists users of the team. +// +// Minimum server version: 5.6 +func (t *TeamService) ListUsers(teamID string, page, count int) ([]*model.User, error) { + users, appErr := t.api.GetUsersInTeam(teamID, page, count) + + return users, normalizeAppErr(appErr) +} + +// ListUnreadForUser gets the unread message and mention counts for each team to which the given user belongs. +// +// Minimum server version: 5.6 +func (t *TeamService) ListUnreadForUser(userID string) ([]*model.TeamUnread, error) { + teamUnreads, appErr := t.api.GetTeamsUnreadForUser(userID) + + return teamUnreads, normalizeAppErr(appErr) +} + +// GetMember returns a specific membership. 
+// +// Minimum server version: 5.2 +func (t *TeamService) GetMember(teamID, userID string) (*model.TeamMember, error) { + teamMember, appErr := t.api.GetTeamMember(teamID, userID) + + return teamMember, normalizeAppErr(appErr) +} + +// ListMembers returns the memberships of a specific team. +// +// Minimum server version: 5.2 +func (t *TeamService) ListMembers(teamID string, page, perPage int) ([]*model.TeamMember, error) { + teamMembers, appErr := t.api.GetTeamMembers(teamID, page, perPage) + + return teamMembers, normalizeAppErr(appErr) +} + +// ListMembersForUser returns all team memberships for a user. +// +// Minimum server version: 5.10 +func (t *TeamService) ListMembersForUser(userID string, page, perPage int) ([]*model.TeamMember, error) { + teamMembers, appErr := t.api.GetTeamMembersForUser(userID, page, perPage) + + return teamMembers, normalizeAppErr(appErr) +} + +// CreateMember creates a team membership. +// +// Minimum server version: 5.2 +func (t *TeamService) CreateMember(teamID, userID string) (*model.TeamMember, error) { + teamMember, appErr := t.api.CreateTeamMember(teamID, userID) + + return teamMember, normalizeAppErr(appErr) +} + +// CreateMembers creates a team membership for all provided user ids. +// +// Minimum server version: 5.2 +func (t *TeamService) CreateMembers(teamID string, userIDs []string, requestorID string) ([]*model.TeamMember, error) { + teamMembers, appErr := t.api.CreateTeamMembers(teamID, userIDs, requestorID) + + return teamMembers, normalizeAppErr(appErr) +} + +// DeleteMember deletes a team membership. +// +// Minimum server version: 5.2 +func (t *TeamService) DeleteMember(teamID, userID, requestorID string) error { + return normalizeAppErr(t.api.DeleteTeamMember(teamID, userID, requestorID)) +} + +// UpdateMemberRoles updates the role for a team membership. 
+// +// Minimum server version: 5.2 +func (t *TeamService) UpdateMemberRoles(teamID, userID, newRoles string) (*model.TeamMember, error) { + teamMember, appErr := t.api.UpdateTeamMemberRoles(teamID, userID, newRoles) + + return teamMember, normalizeAppErr(appErr) +} + +// GetStats gets a team's statistics +// +// Minimum server version: 5.8 +func (t *TeamService) GetStats(teamID string) (*model.TeamStats, error) { + teamStats, appErr := t.api.GetTeamStats(teamID) + + return teamStats, normalizeAppErr(appErr) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/user.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/user.go new file mode 100644 index 00000000..e8f72f2e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/user.go @@ -0,0 +1,264 @@ +package pluginapi + +import ( + "bytes" + "io" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// UserService exposes methods to manipulate users. +type UserService struct { + api plugin.API +} + +// Get gets a user. +// +// Minimum server version: 5.2 +func (u *UserService) Get(userID string) (*model.User, error) { + user, appErr := u.api.GetUser(userID) + + return user, normalizeAppErr(appErr) +} + +// GetByEmail gets a user by their email address. +// +// Minimum server version: 5.2 +func (u *UserService) GetByEmail(email string) (*model.User, error) { + user, appErr := u.api.GetUserByEmail(email) + + return user, normalizeAppErr(appErr) +} + +// GetByUsername gets a user by their username. +// +// Minimum server version: 5.2 +func (u *UserService) GetByUsername(username string) (*model.User, error) { + user, appErr := u.api.GetUserByUsername(username) + + return user, normalizeAppErr(appErr) +} + +// List a list of users based on search options. 
+// +// Minimum server version: 5.10 +func (u *UserService) List(options *model.UserGetOptions) ([]*model.User, error) { + users, appErr := u.api.GetUsers(options) + + return users, normalizeAppErr(appErr) +} + +// ListByUserIDs gets users by their IDs. +// +// Minimum server version: 9.8 +func (u *UserService) ListByUserIDs(userIDs []string) ([]*model.User, error) { + users, appErr := u.api.GetUsersByIds(userIDs) + + return users, normalizeAppErr(appErr) +} + +// ListByUsernames gets users by their usernames. +// +// Minimum server version: 5.6 +func (u *UserService) ListByUsernames(usernames []string) ([]*model.User, error) { + users, appErr := u.api.GetUsersByUsernames(usernames) + + return users, normalizeAppErr(appErr) +} + +// ListInChannel returns a page of users in a channel. Page counting starts at 0. +// The sortBy parameter can be: "username" or "status". +// +// Minimum server version: 5.6 +func (u *UserService) ListInChannel(channelID, sortBy string, page, perPage int) ([]*model.User, error) { + users, appErr := u.api.GetUsersInChannel(channelID, sortBy, page, perPage) + + return users, normalizeAppErr(appErr) +} + +// ListInTeam gets users in team. +// +// Minimum server version: 5.6 +func (u *UserService) ListInTeam(teamID string, page, perPage int) ([]*model.User, error) { + users, appErr := u.api.GetUsersInTeam(teamID, page, perPage) + + return users, normalizeAppErr(appErr) +} + +// Search returns a list of users based on some search criteria. +// +// Minimum server version: 5.6 +func (u *UserService) Search(search *model.UserSearch) ([]*model.User, error) { + users, appErr := u.api.SearchUsers(search) + + return users, normalizeAppErr(appErr) +} + +// Create creates a user. +// +// Minimum server version: 5.2 +func (u *UserService) Create(user *model.User) error { + createdUser, appErr := u.api.CreateUser(user) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *user = *createdUser + + return nil +} + +// Update updates a user. 
+// +// Minimum server version: 5.2 +func (u *UserService) Update(user *model.User) error { + updatedUser, appErr := u.api.UpdateUser(user) + if appErr != nil { + return normalizeAppErr(appErr) + } + + *user = *updatedUser + + return nil +} + +// Delete deletes a user. +// +// Minimum server version: 5.2 +func (u *UserService) Delete(userID string) error { + appErr := u.api.DeleteUser(userID) + + return normalizeAppErr(appErr) +} + +// GetStatus will get a user's status. +// +// Minimum server version: 5.2 +func (u *UserService) GetStatus(userID string) (*model.Status, error) { + status, appErr := u.api.GetUserStatus(userID) + + return status, normalizeAppErr(appErr) +} + +// ListStatusesByIDs will return a list of user statuses based on the provided slice of user IDs. +// +// Minimum server version: 5.2 +func (u *UserService) ListStatusesByIDs(userIDs []string) ([]*model.Status, error) { + statuses, appErr := u.api.GetUserStatusesByIds(userIDs) + + return statuses, normalizeAppErr(appErr) +} + +// UpdateStatus will set a user's status until the user, or another integration/plugin, sets it back to online. +// The status parameter can be: "online", "away", "dnd", or "offline". +// +// Minimum server version: 5.2 +func (u *UserService) UpdateStatus(userID, status string) (*model.Status, error) { + rStatus, appErr := u.api.UpdateUserStatus(userID, status) + + return rStatus, normalizeAppErr(appErr) +} + +// UpdateActive deactivates or reactivates an user. +// +// Minimum server version: 5.8 +func (u *UserService) UpdateActive(userID string, active bool) error { + appErr := u.api.UpdateUserActive(userID, active) + + return normalizeAppErr(appErr) +} + +// GetProfileImage gets user's profile image. 
+// +// Minimum server version: 5.6 +func (u *UserService) GetProfileImage(userID string) (io.Reader, error) { + contentBytes, appErr := u.api.GetProfileImage(userID) + if appErr != nil { + return nil, normalizeAppErr(appErr) + } + + return bytes.NewReader(contentBytes), nil +} + +// SetProfileImage sets a user's profile image. +// +// Minimum server version: 5.6 +func (u *UserService) SetProfileImage(userID string, content io.Reader) error { + contentBytes, err := io.ReadAll(content) + if err != nil { + return err + } + + return normalizeAppErr(u.api.SetProfileImage(userID, contentBytes)) +} + +// HasPermissionTo check if the user has the permission at system scope. +// +// Minimum server version: 5.3 +func (u *UserService) HasPermissionTo(userID string, permission *model.Permission) bool { + return u.api.HasPermissionTo(userID, permission) +} + +// HasPermissionToTeam check if the user has the permission at team scope. +// +// Minimum server version: 5.3 +func (u *UserService) HasPermissionToTeam(userID, teamID string, permission *model.Permission) bool { + return u.api.HasPermissionToTeam(userID, teamID, permission) +} + +// HasPermissionToChannel check if the user has the permission at channel scope. +// +// Minimum server version: 5.3 +func (u *UserService) HasPermissionToChannel(userID, channelID string, permission *model.Permission) bool { + return u.api.HasPermissionToChannel(userID, channelID, permission) +} + +// RolesGrantPermission check if the specified roles grant the specified permission +// +// Minimum server version: 6.3 +func (u *UserService) RolesGrantPermission(roleNames []string, permissionID string) bool { + return u.api.RolesGrantPermission(roleNames, permissionID) +} + +// GetLDAPAttributes will return LDAP attributes for a user. +// The attributes parameter should be a list of attributes to pull. +// Returns a map with attribute names as keys and the user's attributes as values. 
+// Requires an enterprise license, LDAP to be configured and for the user to use LDAP as an authentication method. +// +// Minimum server version: 5.3 +func (u *UserService) GetLDAPAttributes(userID string, attributes []string) (map[string]string, error) { + ldapUserAttributes, appErr := u.api.GetLDAPUserAttributes(userID, attributes) + + return ldapUserAttributes, normalizeAppErr(appErr) +} + +// CreateAccessToken creates a new access token. +// +// Minimum server version: 5.38 +func (u *UserService) CreateAccessToken(userID, description string) (*model.UserAccessToken, error) { + token := &model.UserAccessToken{ + UserId: userID, + Description: description, + } + + createdToken, appErr := u.api.CreateUserAccessToken(token) + + return createdToken, normalizeAppErr(appErr) +} + +// RevokeAccessToken revokes an existing access token. +// +// Minimum server version: 5.38 +func (u *UserService) RevokeAccessToken(tokenID string) error { + return normalizeAppErr(u.api.RevokeUserAccessToken(tokenID)) +} + +// UpdateRoles updates the roles for a user. +// +// Minimum server version: 9.8 +func (u *UserService) UpdateRoles(userID, newRoles string) (*model.User, error) { + user, appErr := u.api.UpdateUserRoles(userID, newRoles) + + return user, normalizeAppErr(appErr) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/pluginapi/utils.go b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/utils.go new file mode 100644 index 00000000..e676cad2 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/pluginapi/utils.go @@ -0,0 +1,30 @@ +package pluginapi + +import ( + "time" +) + +var backoffTimeouts = []time.Duration{ + 50 * time.Millisecond, + 100 * time.Millisecond, + 200 * time.Millisecond, + 200 * time.Millisecond, + 400 * time.Millisecond, + 400 * time.Millisecond, +} + +// progressiveRetry executes a BackoffOperation and waits an increasing time before retrying the operation. 
+func progressiveRetry(operation func() error) error { + var err error + + for attempts := range backoffTimeouts { + err = operation() + if err == nil { + return nil + } + + time.Sleep(backoffTimeouts[attempts]) + } + + return err +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/configservice/configservice.go b/vendor/github.com/mattermost/mattermost/server/public/shared/configservice/configservice.go new file mode 100644 index 00000000..f8874b7f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/configservice/configservice.go @@ -0,0 +1,15 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package configservice + +import ( + "github.com/mattermost/mattermost/server/public/model" +) + +// An interface representing something that contains a Config, such as the app.App struct +type ConfigService interface { + Config() *model.Config + AddConfigListener(func(old, current *model.Config)) string + RemoveConfigListener(string) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/driver/conn.go b/vendor/github.com/mattermost/mattermost/server/public/shared/driver/conn.go new file mode 100644 index 00000000..6e7ab761 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/driver/conn.go @@ -0,0 +1,117 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package driver + +import ( + "context" + "database/sql/driver" + + "github.com/mattermost/mattermost/server/public/plugin" +) + +// Conn is a DB driver conn implementation +// which executes queries using the Plugin DB API. +type Conn struct { + id string + api plugin.Driver +} + +// driverConn is a super-interface combining the basic +// driver.Conn interface with some new additions later. 
+type driverConn interface { + driver.Conn + driver.ConnBeginTx + driver.ConnPrepareContext + driver.ExecerContext + driver.QueryerContext + driver.Pinger +} + +var ( + // Compile-time check to ensure Conn implements the interface. + _ driverConn = &Conn{} +) + +func (c *Conn) Begin() (tx driver.Tx, err error) { + txID, err := c.api.Tx(c.id, driver.TxOptions{}) + if err != nil { + return nil, err + } + + t := &wrapperTx{ + id: txID, + api: c.api, + } + return t, nil +} + +func (c *Conn) BeginTx(_ context.Context, opts driver.TxOptions) (driver.Tx, error) { + txID, err := c.api.Tx(c.id, opts) + if err != nil { + return nil, err + } + + t := &wrapperTx{ + id: txID, + api: c.api, + } + return t, nil +} + +func (c *Conn) Prepare(q string) (driver.Stmt, error) { + stID, err := c.api.Stmt(c.id, q) + if err != nil { + return nil, err + } + + st := &wrapperStmt{ + id: stID, + api: c.api, + } + return st, nil +} + +func (c *Conn) PrepareContext(_ context.Context, q string) (driver.Stmt, error) { + stID, err := c.api.Stmt(c.id, q) + if err != nil { + return nil, err + } + st := &wrapperStmt{ + id: stID, + api: c.api, + } + return st, nil +} + +func (c *Conn) ExecContext(_ context.Context, q string, args []driver.NamedValue) (driver.Result, error) { + resultContainer, err := c.api.ConnExec(c.id, q, args) + if err != nil { + return nil, err + } + res := &wrapperResult{ + res: resultContainer, + } + return res, nil +} + +func (c *Conn) QueryContext(_ context.Context, q string, args []driver.NamedValue) (driver.Rows, error) { + rowsID, err := c.api.ConnQuery(c.id, q, args) + if err != nil { + return nil, err + } + + rows := &wrapperRows{ + id: rowsID, + api: c.api, + } + return rows, nil +} + +func (c *Conn) Ping(_ context.Context) error { + return c.api.ConnPing(c.id) +} + +func (c *Conn) Close() error { + return c.api.ConnClose(c.id) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/driver/driver.go 
b/vendor/github.com/mattermost/mattermost/server/public/shared/driver/driver.go new file mode 100644 index 00000000..aaa4a1be --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/driver/driver.go @@ -0,0 +1,57 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// package driver implements a DB driver that can be used by plugins +// to make SQL queries using RPC. This helps to avoid opening new connections +// for every plugin, and lets everyone use the central connection +// pool in the server. +// The tests for this package are at app/plugin_api_tests/test_db_driver/main.go. +package driver + +import ( + "context" + "database/sql/driver" + + "github.com/mattermost/mattermost/server/public/plugin" +) + +var ( + // Compile-time check to ensure Connector implements the interface. + _ driver.Connector = &Connector{} +) + +// Connector is the DB connector which is used to +// communicate with the DB API. +type Connector struct { + api plugin.Driver + isMaster bool +} + +// NewConnector returns a DB connector that can be used to return a sql.DB object. +// It takes a plugin.Driver implementation and a boolean flag to indicate whether +// to connect to a master or replica DB instance. +func NewConnector(api plugin.Driver, isMaster bool) *Connector { + return &Connector{api: api, isMaster: isMaster} +} + +func (c *Connector) Connect(_ context.Context) (driver.Conn, error) { + connID, err := c.api.Conn(c.isMaster) + if err != nil { + return nil, err + } + + return &Conn{id: connID, api: c.api}, nil +} + +func (c *Connector) Driver() driver.Driver { + return &Driver{c: c} +} + +// Driver is a DB driver implementation. 
+type Driver struct { + c *Connector +} + +func (d Driver) Open(name string) (driver.Conn, error) { + return d.c.Connect(context.Background()) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/driver/objects.go b/vendor/github.com/mattermost/mattermost/server/public/shared/driver/objects.go new file mode 100644 index 00000000..12bf6f92 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/driver/objects.go @@ -0,0 +1,114 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package driver + +import ( + "context" + "database/sql/driver" + + "github.com/mattermost/mattermost/server/public/plugin" +) + +type wrapperTx struct { + driver.Tx + id string + api plugin.Driver +} + +func (t *wrapperTx) Commit() error { + return t.api.TxCommit(t.id) +} + +func (t *wrapperTx) Rollback() error { + return t.api.TxRollback(t.id) +} + +type wrapperStmt struct { + driver.Stmt + id string + api plugin.Driver +} + +func (s *wrapperStmt) Close() error { + return s.api.StmtClose(s.id) +} + +func (s *wrapperStmt) NumInput() int { + return s.api.StmtNumInput(s.id) +} + +func (s *wrapperStmt) ExecContext(_ context.Context, args []driver.NamedValue) (driver.Result, error) { + resultContainer, err := s.api.StmtExec(s.id, args) + if err != nil { + return nil, err + } + res := &wrapperResult{ + res: resultContainer, + } + return res, nil +} + +func (s *wrapperStmt) QueryContext(_ context.Context, args []driver.NamedValue) (driver.Rows, error) { + rowsID, err := s.api.StmtQuery(s.id, args) + if err != nil { + return nil, err + } + rows := &wrapperRows{ + id: rowsID, + api: s.api, + } + return rows, nil +} + +// wrapperResult implements the driver.Result interface. +// This differs from other objects because it already contains the +// information for its methods. This does two things: +// +// 1. Simplifies server-side code by avoiding to track result ids +// in a map. +// 2. 
Avoids round-trip to compute result methods. +type wrapperResult struct { + res plugin.ResultContainer +} + +func (r *wrapperResult) LastInsertId() (int64, error) { + return r.res.LastID, r.res.LastIDError +} + +func (r *wrapperResult) RowsAffected() (int64, error) { + return r.res.RowsAffected, r.res.RowsAffectedError +} + +type wrapperRows struct { + id string + api plugin.Driver +} + +func (r *wrapperRows) Columns() []string { + return r.api.RowsColumns(r.id) +} + +func (r *wrapperRows) Close() error { + return r.api.RowsClose(r.id) +} + +func (r *wrapperRows) Next(dest []driver.Value) error { + return r.api.RowsNext(r.id, dest) +} + +func (r *wrapperRows) HasNextResultSet() bool { + return r.api.RowsHasNextResultSet(r.id) +} + +func (r *wrapperRows) NextResultSet() error { + return r.api.RowsNextResultSet(r.id) +} + +func (r *wrapperRows) ColumnTypeDatabaseTypeName(index int) string { + return r.api.RowsColumnTypeDatabaseTypeName(r.id, index) +} + +func (r *wrapperRows) ColumnTypePrecisionScale(index int) (precision, scale int64, ok bool) { + return r.api.RowsColumnTypePrecisionScale(r.id, index) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/client.go b/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/client.go new file mode 100644 index 00000000..b483b4c3 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/client.go @@ -0,0 +1,216 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package httpservice + +import ( + "context" + "crypto/tls" + "errors" + "fmt" + "net" + "net/http" + "net/netip" + "net/url" + "strings" + "time" + + "golang.org/x/net/http/httpproxy" +) + +const ( + ConnectTimeout = 3 * time.Second + RequestTimeout = 30 * time.Second +) + +var reservedIPRanges []*net.IPNet + +// IsReservedIP checks whether the target IP belongs to reserved IP address ranges to avoid SSRF attacks to the internal +// network of the Mattermost server +func IsReservedIP(ip net.IP) bool { + for _, ipRange := range reservedIPRanges { + if ipRange.Contains(ip) { + return true + } + } + return false +} + +// IsOwnIP handles the special case that a request might be made to the public IP of the host which on Linux is routed +// directly via the loopback IP to any listening sockets, effectively bypassing host-based firewalls such as firewalld +func IsOwnIP(ip net.IP) (bool, error) { + interfaces, err := net.Interfaces() + if err != nil { + return false, err + } + + for _, interf := range interfaces { + addresses, err := interf.Addrs() + if err != nil { + return false, err + } + + for _, addr := range addresses { + var selfIP net.IP + switch v := addr.(type) { + case *net.IPNet: + selfIP = v.IP + case *net.IPAddr: + selfIP = v.IP + } + + if ip.Equal(selfIP) { + return true, nil + } + } + } + + return false, nil +} + +var defaultUserAgent string + +func init() { + for _, cidr := range []string{ + // Strings taken from https://github.com/doyensec/safeurl/blob/main/ip.go + "10.0.0.0/8", /* Private network - RFC 1918 */ + "172.16.0.0/12", /* Private network - RFC 1918 */ + "192.168.0.0/16", /* Private network - RFC 1918 */ + "127.0.0.0/8", /* Loopback - RFC 1122, Section 3.2.1.3 */ + "0.0.0.0/8", /* Current network (only valid as source address) - RFC 1122, Section 3.2.1.3 */ + "169.254.0.0/16", /* Link-local - RFC 3927 */ + "192.0.0.0/24", /* IETF Protocol Assignments - RFC 5736 */ + "192.0.2.0/24", /* TEST-NET-1, documentation and examples - RFC 5737 */ + 
"198.51.100.0/24", /* TEST-NET-2, documentation and examples - RFC 5737 */ + "203.0.113.0/24", /* TEST-NET-3, documentation and examples - RFC 5737 */ + "192.88.99.0/24", /* IPv6 to IPv4 relay (includes 2002::/16) - RFC 3068 */ + "198.18.0.0/15", /* Network benchmark tests - RFC 2544 */ + "224.0.0.0/4", /* IP multicast (former Class D network) - RFC 3171 */ + "240.0.0.0/4", /* Reserved (former Class E network) - RFC 1112, Section 4 */ + "255.255.255.255/32", /* Broadcast - RFC 919, Section 7 */ + "100.64.0.0/10", /* Shared Address Space - RFC 6598 */ + // ipv6 sourced from https://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml + "::/128", /* Unspecified Address - RFC 4291 */ + "::1/128", /* Loopback - RFC 4291 */ + "100::/64", /* Discard prefix - RFC 6666 */ + "2001::/23", /* IETF Protocol Assignments - RFC 2928 */ + "2001:2::/48", /* Benchmarking - RFC5180 */ + "2001:db8::/32", /* Addresses used in documentation and example source code - RFC 3849 */ + "2001::/32", /* Teredo tunneling - RFC4380 - RFC8190 */ + "fc00::/7", /* Unique local address - RFC 4193 - RFC 8190 */ + "fe80::/10", /* Link-local address - RFC 4291 */ + "ff00::/8", /* Multicast - RFC 3513 */ + "2002::/16", /* 6to4 - RFC 3056 */ + "64:ff9b::/96", /* IPv4/IPv6 translation - RFC 6052 */ + "2001:10::/28", /* Deprecated (previously ORCHID) - RFC 4843 */ + "2001:20::/28", /* ORCHIDv2 - RFC7343 */ + } { + _, parsed, err := net.ParseCIDR(cidr) + if err != nil { + panic(err) + } + reservedIPRanges = append(reservedIPRanges, parsed) + } + defaultUserAgent = "Mattermost-Bot/1.1" +} + +type DialContextFunction func(ctx context.Context, network, addr string) (net.Conn, error) + +var ErrAddressForbidden = errors.New("address forbidden, you may need to set AllowedUntrustedInternalConnections to allow an integration access to your internal network") + +// dialContextFilter wraps a dial function to filter connections based on host and IP validation. 
+// It first checks if the host is allowed, then resolves the hostname to IPs and validates each one. +// Returns detailed error messages when connections are rejected for security reasons. +func dialContextFilter(dial DialContextFunction, allowHost func(host string) bool, allowIP func(ip net.IP) error) DialContextFunction { + return func(ctx context.Context, network, addr string) (net.Conn, error) { + host, port, err := net.SplitHostPort(addr) + if err != nil { + return nil, err + } + + if allowHost != nil && allowHost(host) { + return dial(ctx, network, addr) + } + + ips, err := net.LookupIP(host) + if err != nil { + return nil, err + } + + var firstDialErr error + var forbiddenReasons []string + for _, ip := range ips { + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + + if allowIP == nil { + forbiddenReasons = append(forbiddenReasons, fmt.Sprintf("IP %s is not allowed", ip)) + continue + } + + if err := allowIP(ip); err != nil { + forbiddenReasons = append(forbiddenReasons, err.Error()) + continue + } + + conn, err := dial(ctx, network, net.JoinHostPort(ip.String(), port)) + if err == nil { + return conn, nil + } + if firstDialErr == nil { + firstDialErr = err + } + } + if firstDialErr == nil { + // If we didn't find an allowed IP address, return an error explaining why + if len(forbiddenReasons) > 0 { + return nil, fmt.Errorf("%s: %s", ErrAddressForbidden.Error(), strings.Join(forbiddenReasons, "; ")) + } + return nil, ErrAddressForbidden + } + return nil, firstDialErr + } +} + +func getProxyFn() func(r *http.Request) (*url.URL, error) { + proxyFromEnvFn := httpproxy.FromEnvironment().ProxyFunc() + return func(r *http.Request) (*url.URL, error) { + // TODO: Consider removing this code once MM-61938 is fixed upstream. 
+ if r.URL != nil { + if addr, err := netip.ParseAddr(r.URL.Hostname()); err == nil && addr.Is6() && addr.Zone() != "" { + return nil, fmt.Errorf("invalid IPv6 address in URL: %q", addr) + } + } + + return proxyFromEnvFn(r.URL) + } +} + +// NewTransport creates a new MattermostTransport with detailed error messages for IP check failures +func NewTransport(enableInsecureConnections bool, allowHost func(host string) bool, allowIP func(ip net.IP) error) *MattermostTransport { + dialContext := (&net.Dialer{ + Timeout: ConnectTimeout, + KeepAlive: 30 * time.Second, + }).DialContext + + if allowHost != nil || allowIP != nil { + dialContext = dialContextFilter(dialContext, allowHost, allowIP) + } + + return &MattermostTransport{ + &http.Transport{ + Proxy: getProxyFn(), + DialContext: dialContext, + MaxIdleConns: 100, + IdleConnTimeout: 90 * time.Second, + TLSHandshakeTimeout: ConnectTimeout, + ExpectContinueTimeout: 1 * time.Second, + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: enableInsecureConnections, + }, + }, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/httpservice.go b/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/httpservice.go new file mode 100644 index 00000000..d886b45b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/httpservice.go @@ -0,0 +1,116 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package httpservice + +import ( + "fmt" + "net" + "net/http" + "slices" + "strings" + "time" + "unicode" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/plugin" +) + +// HTTPService wraps the functionality for making http requests to provide some improvements to the default client +// behaviour. 
+type HTTPService interface { + // MakeClient returns an http client constructed with a RoundTripper as returned by MakeTransport. + MakeClient(trustURLs bool) *http.Client + + // MakeTransport returns a RoundTripper that is suitable for making requests to external resources. The default + // implementation provides: + // - A shorter timeout for dial and TLS handshake (defined as constant "ConnectTimeout") + // - A timeout for end-to-end requests + // - A Mattermost-specific user agent header + // - Additional security for untrusted and insecure connections + MakeTransport(trustURLs bool) *MattermostTransport +} + +type getConfig interface { + Config() *model.Config +} + +type HTTPServiceImpl struct { + configService getConfig + + RequestTimeout time.Duration +} + +func splitFields(c rune) bool { + return unicode.IsSpace(c) || c == ',' +} + +func MakeHTTPService(configService getConfig) HTTPService { + return &HTTPServiceImpl{ + configService, + RequestTimeout, + } +} + +type pluginAPIConfigServiceAdapter struct { + pluginAPIConfigService plugin.API +} + +func (p *pluginAPIConfigServiceAdapter) Config() *model.Config { + return p.pluginAPIConfigService.GetConfig() +} + +func MakeHTTPServicePlugin(configService plugin.API) HTTPService { + return MakeHTTPService(&pluginAPIConfigServiceAdapter{configService}) +} + +func (h *HTTPServiceImpl) MakeClient(trustURLs bool) *http.Client { + return &http.Client{ + Transport: h.MakeTransport(trustURLs), + Timeout: h.RequestTimeout, + } +} + +func (h *HTTPServiceImpl) MakeTransport(trustURLs bool) *MattermostTransport { + insecure := h.configService.Config().ServiceSettings.EnableInsecureOutgoingConnections != nil && *h.configService.Config().ServiceSettings.EnableInsecureOutgoingConnections + + if trustURLs { + return NewTransport(insecure, nil, nil) + } + + allowHost := func(host string) bool { + if h.configService.Config().ServiceSettings.AllowedUntrustedInternalConnections == nil { + return false + } + return 
slices.Contains(strings.FieldsFunc(*h.configService.Config().ServiceSettings.AllowedUntrustedInternalConnections, splitFields), host) + } + + allowIP := func(ip net.IP) error { + reservedIP := IsReservedIP(ip) + + ownIP, err := IsOwnIP(ip) + if err != nil { + // If there is an error getting the self-assigned IPs, default to the secure option + return fmt.Errorf("unable to determine if IP is own IP: %w", err) + } + + // If it's not a reserved IP and it's not self-assigned IP, accept the IP + if !reservedIP && !ownIP { + return nil + } + + // In the case it's the self-assigned IP, enforce that it needs to be explicitly added to the AllowedUntrustedInternalConnections + for _, allowed := range strings.FieldsFunc(model.SafeDereference(h.configService.Config().ServiceSettings.AllowedUntrustedInternalConnections), splitFields) { + if _, ipRange, err := net.ParseCIDR(allowed); err == nil && ipRange.Contains(ip) { + return nil + } + } + + if reservedIP { + return fmt.Errorf("IP %s is in a reserved range and not in AllowedUntrustedInternalConnections", ip) + } + return fmt.Errorf("IP %s is a self-assigned IP and not in AllowedUntrustedInternalConnections", ip) + } + + return NewTransport(insecure, allowHost, allowIP) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/transport.go b/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/transport.go new file mode 100644 index 00000000..abbc6c8d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/httpservice/transport.go @@ -0,0 +1,21 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package httpservice + +import ( + "net/http" +) + +// MattermostTransport is an implementation of http.RoundTripper that ensures each request contains a custom user agent +// string to indicate that the request is coming from a Mattermost instance. 
+type MattermostTransport struct { + // Transport is the underlying http.RoundTripper that is actually used to make the request + Transport http.RoundTripper +} + +func (t *MattermostTransport) RoundTrip(req *http.Request) (*http.Response, error) { + req.Header.Set("User-Agent", defaultUserAgent) + + return t.Transport.RoundTrip(req) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/i18n/i18n.go b/vendor/github.com/mattermost/mattermost/server/public/shared/i18n/i18n.go new file mode 100644 index 00000000..fb6a06e5 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/i18n/i18n.go @@ -0,0 +1,343 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package i18n + +import ( + "fmt" + "html/template" + "net/http" + "os" + "path/filepath" + "reflect" + "slices" + "strings" + "sync" + + "github.com/mattermost/go-i18n/i18n" + "github.com/mattermost/go-i18n/i18n/bundle" + + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// mut is used to protect other global variables from concurrent access. +// This should only be a concern in parallel tests. +var mut sync.Mutex + +const defaultLocale = "en" + +// TranslateFunc is the type of the translate functions +type TranslateFunc func(translationID string, args ...any) string + +// TranslationFuncByLocal is the type of function that takes local as a string and returns the translation function +type TranslationFuncByLocal func(locale string) TranslateFunc + +var ( + t TranslateFunc + tDefault TranslateFunc +) + +// T is the translate function using the default server language as fallback language +var T TranslateFunc = func(translationID string, args ...any) string { + mut.Lock() + defer mut.Unlock() + + if t == nil { + return translationID + } + + return t(translationID, args...) 
+} + +// TDefault is the translate function using english as fallback language +var TDefault TranslateFunc = func(translationID string, args ...any) string { + mut.Lock() + defer mut.Unlock() + + if tDefault == nil { + return translationID + } + + return t(translationID, args...) +} + +var locales = make(map[string]string) + +// supportedLocales is a hard-coded list of locales considered ready for production use. It must +// be kept in sync with ../../../../webapp/channels/src/i18n/i18n.jsx. +var supportedLocales = []string{ + "de", + "en", + "en-AU", + "es", + "fr", + "it", + "hu", + "nl", + "pl", + "pt-BR", + "ro", + "sv", + "vi", + "tr", + "bg", + "ru", + "uk", + "fa", + "ko", + "zh-CN", + "zh-TW", + "ja", +} + +var ( + defaultServerLocale string + defaultClientLocale string +) + +// TranslationsPreInit loads translations from filesystem if they are not +// loaded already and assigns english while loading server config +func TranslationsPreInit(translationsDir string) error { + mut.Lock() + defer mut.Unlock() + if t != nil { + return nil + } + + // Set T even if we fail to load the translations. Lots of shutdown handling code will + // segfault trying to handle the error, and the untranslated IDs are strictly better. + t = tfuncWithFallback(defaultLocale) + tDefault = tfuncWithFallback(defaultLocale) + + return initTranslationsWithDir(translationsDir) +} + +// TranslationsPreInitFromFileBytes loads translations from a buffer -- useful if +// we need to initialize i18n from an embedded i18n file (e.g., from a CLI tool) +func TranslationsPreInitFromFileBytes(filename string, buf []byte) error { + mut.Lock() + defer mut.Unlock() + if t != nil { + return nil + } + + // Set T even if we fail to load the translations. Lots of shutdown handling code will + // segfault trying to handle the error, and the untranslated IDs are strictly better. 
+ t = tfuncWithFallback(defaultLocale) + tDefault = tfuncWithFallback(defaultLocale) + + locale := strings.Split(filename, ".")[0] + if !isSupportedLocale(locale) { + return fmt.Errorf("locale not supported: %s", locale) + } + + locales[locale] = filename + + return i18n.ParseTranslationFileBytes(filename, buf) +} + +// InitTranslations set the defaults configured in the server and initialize +// the T function using the server default as fallback language +func InitTranslations(serverLocale, clientLocale string) error { + mut.Lock() + defaultServerLocale = serverLocale + defaultClientLocale = clientLocale + mut.Unlock() + + tfn, err := GetTranslationsBySystemLocale() + + mut.Lock() + t = tfn + mut.Unlock() + + return err +} + +func initTranslationsWithDir(dir string) error { + files, _ := os.ReadDir(dir) + for _, f := range files { + if filepath.Ext(f.Name()) == ".json" { + filename := f.Name() + + locale := strings.Split(filename, ".")[0] + if !isSupportedLocale(locale) { + continue + } + + locales[locale] = filepath.Join(dir, filename) + + if err := i18n.LoadTranslationFile(filepath.Join(dir, filename)); err != nil { + return err + } + } + } + + return nil +} + +// GetTranslationFuncForDir loads translations from the filesystem into a new instance of the bundle. +// It returns a function to access loaded translations. 
+func GetTranslationFuncForDir(dir string) (TranslationFuncByLocal, error) { + availableLocals := make(map[string]string) + bundle := bundle.New() + files, _ := os.ReadDir(dir) + for _, f := range files { + if filepath.Ext(f.Name()) != ".json" { + continue + } + + locale := strings.Split(f.Name(), ".")[0] + if !isSupportedLocale(locale) { + continue + } + + filename := f.Name() + availableLocals[locale] = filepath.Join(dir, filename) + if err := bundle.LoadTranslationFile(filepath.Join(dir, filename)); err != nil { + return nil, err + } + } + + return func(locale string) TranslateFunc { + if _, ok := availableLocals[locale]; !ok { + locale = defaultLocale + } + + t, _ := bundle.Tfunc(locale) + return func(translationID string, args ...any) string { + if translated := t(translationID, args...); translated != translationID { + return translated + } + + t, _ := bundle.Tfunc(defaultLocale) + return t(translationID, args...) + } + }, nil +} + +func GetTranslationsBySystemLocale() (TranslateFunc, error) { + mut.Lock() + defer mut.Unlock() + locale := defaultServerLocale + if _, ok := locales[locale]; !ok { + mlog.Warn("Failed to load system translations for selected locale, attempting to fall back to default", mlog.String("locale", locale), mlog.String("default_locale", defaultLocale)) + locale = defaultLocale + } + + if !isSupportedLocale(locale) { + mlog.Warn("Selected locale is unsupported, attempting to fall back to default", mlog.String("locale", locale), mlog.String("default_locale", defaultLocale)) + locale = defaultLocale + } + + if locales[locale] == "" { + return nil, fmt.Errorf("failed to load system translations for '%v'", defaultLocale) + } + + translations := tfuncWithFallback(locale) + if translations == nil { + return nil, fmt.Errorf("failed to load system translations") + } + + mlog.Info("Loaded system translations", mlog.String("for locale", locale), mlog.String("from locale", locales[locale])) + return translations, nil +} + +// GetUserTranslations get 
the translation function for an specific locale +func GetUserTranslations(locale string) TranslateFunc { + mut.Lock() + defer mut.Unlock() + if _, ok := locales[locale]; !ok { + locale = defaultLocale + } + + translations := tfuncWithFallback(locale) + return translations +} + +// GetTranslationsAndLocaleFromRequest return the translation function and the +// locale based on a request headers +func GetTranslationsAndLocaleFromRequest(r *http.Request) (TranslateFunc, string) { + mut.Lock() + defer mut.Unlock() + // This is for checking against locales like pt_BR or zn_CN + headerLocaleFull := strings.Split(r.Header.Get("Accept-Language"), ",")[0] + // This is for checking against locales like en, es + headerLocale := strings.Split(strings.Split(r.Header.Get("Accept-Language"), ",")[0], "-")[0] + defaultLocale := defaultClientLocale + if locales[headerLocaleFull] != "" { + translations := tfuncWithFallback(headerLocaleFull) + return translations, headerLocaleFull + } else if locales[headerLocale] != "" { + translations := tfuncWithFallback(headerLocale) + return translations, headerLocale + } else if locales[defaultLocale] != "" { + translations := tfuncWithFallback(defaultLocale) + return translations, headerLocale + } + + translations := tfuncWithFallback(defaultLocale) + return translations, defaultLocale +} + +// GetSupportedLocales return a map of locale code and the file path with the +// translations +func GetSupportedLocales() map[string]string { + mut.Lock() + defer mut.Unlock() + return locales +} + +func tfuncWithFallback(pref string) TranslateFunc { + t, _ := i18n.Tfunc(pref) + return func(translationID string, args ...any) string { + if translated := t(translationID, args...); translated != translationID { + return translated + } + + t, _ := i18n.Tfunc(defaultLocale) + return t(translationID, args...) 
+ } +} + +// TranslateAsHTML translates the translationID provided and return a +// template.HTML object +func TranslateAsHTML(t TranslateFunc, translationID string, args map[string]any) template.HTML { + message := t(translationID, escapeForHTML(args)) + message = strings.Replace(message, "[[", "", -1) + message = strings.Replace(message, "]]", "", -1) + return template.HTML(message) +} + +func escapeForHTML(arg any) any { + switch typedArg := arg.(type) { + case string: + return template.HTMLEscapeString(typedArg) + case *string: + return template.HTMLEscapeString(*typedArg) + case map[string]any: + safeArg := make(map[string]any, len(typedArg)) + for key, value := range typedArg { + safeArg[key] = escapeForHTML(value) + } + return safeArg + default: + mlog.Warn( + "Unable to escape value for HTML template", + mlog.Any("html_template", arg), + mlog.String("template_type", reflect.ValueOf(arg).Type().String()), + ) + return "" + } +} + +// IdentityTfunc returns a translation function that don't translate, only +// returns the same id +func IdentityTfunc() TranslateFunc { + return func(translationID string, args ...any) string { + return translationID + } +} + +func isSupportedLocale(locale string) bool { + return slices.Contains(supportedLocales, locale) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/autolink.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/autolink.go new file mode 100644 index 00000000..c7a9345f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/autolink.go @@ -0,0 +1,255 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package markdown + +import ( + "regexp" + "strings" + "unicode" + "unicode/utf8" +) + +// Based off of extensions/autolink.c from https://github.com/github/cmark + +var ( + DefaultURLSchemes = []string{"http", "https", "ftp", "mailto", "tel"} + wwwAutoLinkRegex = regexp.MustCompile(`^www\d{0,3}\.`) +) + +// Given a string with a w at the given position, tries to parse and return a range containing a www link. +// if one exists. If the text at the given position isn't a link, returns an empty string. Equivalent to +// www_match from the reference code. +func parseWWWAutolink(data string, position int) (Range, bool) { + // Check that this isn't part of another word + if position > 1 { + prevChar := data[position-1] + + if !isWhitespaceByte(prevChar) && !isAllowedBeforeWWWLink(prevChar) { + return Range{}, false + } + } + + // Check that this starts with www + if len(data)-position < 4 || !wwwAutoLinkRegex.MatchString(data[position:]) { + return Range{}, false + } + + end := checkDomain(data[position:], false) + if end == 0 { + return Range{}, false + } + + end += position + + // Grab all text until the end of the string or the next whitespace character + for end < len(data) && !isWhitespaceByte(data[end]) { + end += 1 + } + + // Trim trailing punctuation + end = trimTrailingCharactersFromLink(data, position, end) + if position == end { + return Range{}, false + } + + return Range{position, end}, true +} + +func isAllowedBeforeWWWLink(c byte) bool { + switch c { + case '*', '_', '~', ')', '<', '(', '>': + return true + } + return false +} + +// Given a string with a : at the given position, tried to parse and return a range containing a URL scheme +// if one exists. If the text around the given position isn't a link, returns an empty string. Equivalent to +// url_match from the reference code. +func parseURLAutolink(data string, position int) (Range, bool) { + // Check that a :// exists. This doesn't match the clients that treat the slashes as optional. 
+ if len(data)-position < 4 || data[position+1] != '/' || data[position+2] != '/' { + return Range{}, false + } + + start := position - 1 + for start > 0 && isAlphanumericByte(data[start-1]) { + start -= 1 + } + + if start < 0 || position >= len(data) { + return Range{}, false + } + + // Ensure that the URL scheme is allowed and that at least one character after the scheme is valid. + scheme := data[start:position] + if !isSchemeAllowed(scheme) || !isValidHostCharacter(data[position+3:]) { + return Range{}, false + } + + end := checkDomain(data[position+3:], true) + if end == 0 { + return Range{}, false + } + + end += position + + // Grab all text until the end of the string or the next whitespace character + for end < len(data) && !isWhitespaceByte(data[end]) { + end += 1 + } + + // Trim trailing punctuation + end = trimTrailingCharactersFromLink(data, start, end) + if start == end { + return Range{}, false + } + + return Range{start, end}, true +} + +func isSchemeAllowed(scheme string) bool { + // Note that this doesn't support the custom URL schemes implemented by the client + for _, allowed := range DefaultURLSchemes { + if strings.EqualFold(allowed, scheme) { + return true + } + } + + return false +} + +// Given a string starting with a URL, returns the number of valid characters that make up the URL's domain. +// Returns 0 if the string doesn't start with a domain name. allowShort determines whether or not the domain +// needs to contain a period to be considered valid. Equivalent to check_domain from the reference code. +func checkDomain(data string, allowShort bool) int { + foundUnderscore := false + foundPeriod := false + + i := 1 + for ; i < len(data)-1; i++ { + if data[i] == '_' { + foundUnderscore = true + break + } else if data[i] == '.' 
{ + foundPeriod = true + } else if !isValidHostCharacter(data[i:]) && data[i] != '-' { + break + } + } + + if foundUnderscore { + return 0 + } + + if allowShort { + // If allowShort is set, accept any string of valid domain characters + return i + } + + // If allowShort isn't set, a valid domain just requires at least a single period. Note that this + // logic isn't entirely necessary because we already know the string starts with "www." when + // this is called from parseWWWAutolink + if foundPeriod { + return i + } + return 0 +} + +// Returns true if the provided link starts with a valid character for a domain name. Equivalent to +// is_valid_hostchar from the reference code. +func isValidHostCharacter(link string) bool { + c, _ := utf8.DecodeRuneInString(link) + if c == utf8.RuneError { + return false + } + + return !unicode.IsSpace(c) && !unicode.IsPunct(c) +} + +// Removes any trailing characters such as punctuation or stray brackets that shouldn't be part of the link. +// Returns a new end position for the link. Equivalent to autolink_delim from the reference code. 
+func trimTrailingCharactersFromLink(markdown string, start int, end int) int { + runes := []rune(markdown[start:end]) + linkEnd := len(runes) + + // Cut off the link before an angle bracket if it contains one + for i, c := range runes { + if c == '<' || c == '>' { + linkEnd = i + break + } + } + + for linkEnd > 0 { + c := runes[linkEnd-1] + + if !canEndAutolink(c) { + // Trim trailing quotes, periods, etc + linkEnd = linkEnd - 1 + } else if c == ';' { + // Trim a trailing HTML entity + newEnd := linkEnd - 2 + + for newEnd > 0 && ((runes[newEnd] >= 'a' && runes[newEnd] <= 'z') || (runes[newEnd] >= 'A' && runes[newEnd] <= 'Z')) { + newEnd -= 1 + } + + if newEnd < linkEnd-2 && runes[newEnd] == '&' { + linkEnd = newEnd + } else { + // This isn't actually an HTML entity, so just trim the semicolon + linkEnd = linkEnd - 1 + } + } else if c == ')' { + // Only allow an autolink ending with a bracket if that bracket is part of a matching pair of brackets. + // If there are more closing brackets than opening ones, remove the extra bracket + + numClosing := 0 + numOpening := 0 + + // Examples (input text => output linked portion): + // + // http://www.pokemon.com/Pikachu_(Electric) + // => http://www.pokemon.com/Pikachu_(Electric) + // + // http://www.pokemon.com/Pikachu_((Electric) + // => http://www.pokemon.com/Pikachu_((Electric) + // + // http://www.pokemon.com/Pikachu_(Electric)) + // => http://www.pokemon.com/Pikachu_(Electric) + // + // http://www.pokemon.com/Pikachu_((Electric)) + // => http://www.pokemon.com/Pikachu_((Electric)) + + for i := 0; i < linkEnd; i++ { + if runes[i] == '(' { + numOpening += 1 + } else if runes[i] == ')' { + numClosing += 1 + } + } + + if numClosing <= numOpening { + // There's fewer or equal closing brackets, so we've found the end of the link + break + } + + linkEnd -= 1 + } else { + // There's no special characters at the end of the link, so we're at the end + break + } + } + + return start + len(string(runes[:linkEnd])) +} + +func 
canEndAutolink(c rune) bool { + switch c { + case '?', '!', '.', ',', ':', '*', '_', '~', '\'', '"': + return false + } + return true +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/block_quote.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/block_quote.go new file mode 100644 index 00000000..5cf66d10 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/block_quote.go @@ -0,0 +1,62 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +type BlockQuote struct { + blockBase + markdown string + + Children []Block +} + +func (b *BlockQuote) Continuation(indentation int, r Range) *continuation { + if indentation > 3 { + return nil + } + s := b.markdown[r.Position:r.End] + if s == "" || s[0] != '>' { + return nil + } + remaining := Range{r.Position + 1, r.End} + indentation, indentationBytes := countIndentation(b.markdown, remaining) + if indentation > 0 { + indentation-- + } + return &continuation{ + Indentation: indentation, + Remaining: Range{remaining.Position + indentationBytes, remaining.End}, + } +} + +func (b *BlockQuote) AddChild(openBlocks []Block) []Block { + b.Children = append(b.Children, openBlocks[0]) + return openBlocks +} + +func blockQuoteStart(markdown string, indent int, r Range) []Block { + if indent > 3 { + return nil + } + s := markdown[r.Position:r.End] + if s == "" || s[0] != '>' { + return nil + } + + block := &BlockQuote{ + markdown: markdown, + } + r.Position++ + if len(s) > 1 && s[1] == ' ' { + r.Position++ + } + + indent, bytes := countIndentation(markdown, r) + + ret := []Block{block} + if descendants := blockStartOrParagraph(markdown, indent, Range{r.Position + bytes, r.End}, nil, nil); descendants != nil { + block.Children = append(block.Children, descendants[0]) + ret = append(ret, descendants...) 
+ } + return ret +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/blocks.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/blocks.go new file mode 100644 index 00000000..fe9e272f --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/blocks.go @@ -0,0 +1,154 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import ( + "strings" +) + +type continuation struct { + Indentation int + Remaining Range +} + +type Block interface { + Continuation(indentation int, r Range) *continuation + AddLine(indentation int, r Range) bool + Close() + AllowsBlockStarts() bool + HasTrailingBlankLine() bool +} + +type blockBase struct{} + +func (*blockBase) AddLine(indentation int, r Range) bool { return false } +func (*blockBase) Close() {} +func (*blockBase) AllowsBlockStarts() bool { return true } +func (*blockBase) HasTrailingBlankLine() bool { return false } + +type ContainerBlock interface { + Block + AddChild(openBlocks []Block) []Block +} + +type Range struct { + Position int + End int +} + +func closeBlocks(blocks []Block, referenceDefinitions []*ReferenceDefinition) []*ReferenceDefinition { + for _, block := range blocks { + block.Close() + if p, ok := block.(*Paragraph); ok && len(p.ReferenceDefinitions) > 0 { + referenceDefinitions = append(referenceDefinitions, p.ReferenceDefinitions...) 
+ } + } + return referenceDefinitions +} + +func ParseBlocks(markdown string, lines []Line) (*Document, []*ReferenceDefinition) { + document := &Document{} + var referenceDefinitions []*ReferenceDefinition + + openBlocks := []Block{document} + + for _, line := range lines { + r := line.Range + lastMatchIndex := 0 + + indentation, indentationBytes := countIndentation(markdown, r) + r = Range{r.Position + indentationBytes, r.End} + + for i, block := range openBlocks { + if continuation := block.Continuation(indentation, r); continuation != nil { + indentation = continuation.Indentation + r = continuation.Remaining + additionalIndentation, additionalIndentationBytes := countIndentation(markdown, r) + r = Range{r.Position + additionalIndentationBytes, r.End} + indentation += additionalIndentation + lastMatchIndex = i + } else { + break + } + } + + if openBlocks[lastMatchIndex].AllowsBlockStarts() { + if newBlocks := blockStart(markdown, indentation, r, openBlocks[:lastMatchIndex+1], openBlocks[lastMatchIndex+1:]); newBlocks != nil { + didAdd := false + for i := lastMatchIndex; i >= 0; i-- { + if container, ok := openBlocks[i].(ContainerBlock); ok { + if addedBlocks := container.AddChild(newBlocks); addedBlocks != nil { + referenceDefinitions = closeBlocks(openBlocks[i+1:], referenceDefinitions) + openBlocks = openBlocks[:i+1] + openBlocks = append(openBlocks, addedBlocks...) 
+ didAdd = true + break + } + } + } + if didAdd { + continue + } + } + } + + isBlank := strings.TrimSpace(markdown[r.Position:r.End]) == "" + if paragraph, ok := openBlocks[len(openBlocks)-1].(*Paragraph); ok && !isBlank { + paragraph.Text = append(paragraph.Text, r) + continue + } + + referenceDefinitions = closeBlocks(openBlocks[lastMatchIndex+1:], referenceDefinitions) + openBlocks = openBlocks[:lastMatchIndex+1] + + if openBlocks[lastMatchIndex].AddLine(indentation, r) { + continue + } + + if paragraph := newParagraph(markdown, r); paragraph != nil { + for i := lastMatchIndex; i >= 0; i-- { + if container, ok := openBlocks[i].(ContainerBlock); ok { + if newBlocks := container.AddChild([]Block{paragraph}); newBlocks != nil { + referenceDefinitions = closeBlocks(openBlocks[i+1:], referenceDefinitions) + openBlocks = openBlocks[:i+1] + openBlocks = append(openBlocks, newBlocks...) + break + } + } + } + } + } + + referenceDefinitions = closeBlocks(openBlocks, referenceDefinitions) + + return document, referenceDefinitions +} + +func blockStart(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block { + if r.Position >= r.End { + return nil + } + + if start := blockQuoteStart(markdown, indentation, r); start != nil { + return start + } else if start := listStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil { + return start + } else if start := indentedCodeStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil { + return start + } else if start := fencedCodeStart(markdown, indentation, r); start != nil { + return start + } + + return nil +} + +func blockStartOrParagraph(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block { + if start := blockStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil { + return start + } + if paragraph := newParagraph(markdown, r); paragraph != nil { + return []Block{paragraph} + } + return nil 
+} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/document.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/document.go new file mode 100644 index 00000000..306b93da --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/document.go @@ -0,0 +1,22 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +type Document struct { + blockBase + + Children []Block +} + +func (b *Document) Continuation(indentation int, r Range) *continuation { + return &continuation{ + Indentation: indentation, + Remaining: r, + } +} + +func (b *Document) AddChild(openBlocks []Block) []Block { + b.Children = append(b.Children, openBlocks[0]) + return openBlocks +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/emoji.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/emoji.go new file mode 100644 index 00000000..8d2aaf9d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/emoji.go @@ -0,0 +1,42 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import ( + "regexp" +) + +// Based off the mobile app's emoji parsing from https://github.com/mattermost/commonmark.js + +var ( + emojiRegex = regexp.MustCompile(`^:([a-z0-9_\-+]+):\B`) +) + +// parseEmoji attempts to parse a named emoji (eg. :taco:) starting at the current parser position. If an emoji is +// found, it adds that to p.inlines and returns true. Otherwise, it returns false. 
+func (p *inlineParser) parseEmoji() bool { + // Only allow emojis after non-word characters + if p.position > 1 { + prevChar := p.raw[p.position-1] + + if isWordByte(prevChar) { + return false + } + } + + remaining := p.raw[p.position:] + + loc := emojiRegex.FindStringIndex(remaining) + if loc == nil { + return false + } + + // Note that there may not be a system or custom emoji that exists with this name + p.inlines = append(p.inlines, &Emoji{ + Name: remaining[loc[0]+1 : loc[1]-1], + }) + p.position += loc[1] - loc[0] + + return true +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/fenced_code.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/fenced_code.go new file mode 100644 index 00000000..22f43bfd --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/fenced_code.go @@ -0,0 +1,113 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package markdown + +import ( + "strings" +) + +type FencedCodeLine struct { + Indentation int + Range Range +} + +type FencedCode struct { + blockBase + markdown string + didSeeClosingFence bool + + Indentation int + OpeningFence Range + RawInfo Range + RawCode []FencedCodeLine +} + +func (b *FencedCode) Code() string { + var resultSb strings.Builder + for _, code := range b.RawCode { + resultSb.WriteString(strings.Repeat(" ", code.Indentation) + b.markdown[code.Range.Position:code.Range.End]) + } + return resultSb.String() +} + +func (b *FencedCode) Info() string { + return Unescape(b.markdown[b.RawInfo.Position:b.RawInfo.End]) +} + +func (b *FencedCode) Continuation(indentation int, r Range) *continuation { + if b.didSeeClosingFence { + return nil + } + return &continuation{ + Indentation: indentation, + Remaining: r, + } +} + +func (b *FencedCode) AddLine(indentation int, r Range) bool { + s := b.markdown[r.Position:r.End] + if indentation <= 3 && strings.HasPrefix(s, b.markdown[b.OpeningFence.Position:b.OpeningFence.End]) { + suffix := strings.TrimSpace(s[b.OpeningFence.End-b.OpeningFence.Position:]) + isClosingFence := true + for _, c := range suffix { + if c != rune(s[0]) { + isClosingFence = false + break + } + } + if isClosingFence { + b.didSeeClosingFence = true + return true + } + } + + if indentation >= b.Indentation { + indentation -= b.Indentation + } else { + indentation = 0 + } + + b.RawCode = append(b.RawCode, FencedCodeLine{ + Indentation: indentation, + Range: r, + }) + return true +} + +func (b *FencedCode) AllowsBlockStarts() bool { + return false +} + +func fencedCodeStart(markdown string, indentation int, r Range) []Block { + s := markdown[r.Position:r.End] + + if !strings.HasPrefix(s, "```") && !strings.HasPrefix(s, "~~~") { + return nil + } + + fenceCharacter := rune(s[0]) + fenceLength := 3 + for _, c := range s[3:] { + if c == fenceCharacter { + fenceLength++ + } else { + break + } + } + + for i := r.Position + fenceLength; i < r.End; 
i++ { + if markdown[i] == '`' { + return nil + } + } + + return []Block{ + &FencedCode{ + markdown: markdown, + Indentation: indentation, + RawInfo: trimRightSpace(markdown, Range{r.Position + fenceLength, r.End}), + OpeningFence: Range{r.Position, r.Position + fenceLength}, + }, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/html.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/html.go new file mode 100644 index 00000000..fb63c419 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/html.go @@ -0,0 +1,213 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import ( + "fmt" + "strings" +) + +var htmlEscaper = strings.NewReplacer( + `&`, "&", + `<`, "<", + `>`, ">", + `"`, """, +) + +// RenderHTML produces HTML with the same behavior as the example renderer used in the CommonMark +// reference materials except for one slight difference: for brevity, no unnecessary whitespace is +// inserted between elements. The output is not defined by the CommonMark spec, and it exists +// primarily as an aid in testing. +func RenderHTML(markdown string) string { + return RenderBlockHTML(Parse(markdown)) +} + +func RenderBlockHTML(block Block, referenceDefinitions []*ReferenceDefinition) (result string) { + return renderBlockHTML(block, referenceDefinitions, false) +} + +func renderBlockHTML(block Block, referenceDefinitions []*ReferenceDefinition, isTightList bool) string { + var resultSb strings.Builder + + switch v := block.(type) { + case *Document: + for _, block := range v.Children { + resultSb.WriteString(RenderBlockHTML(block, referenceDefinitions)) + } + case *Paragraph: + if len(v.Text) == 0 { + return "" + } + if !isTightList { + resultSb.WriteString("

") + } + for _, inline := range v.ParseInlines(referenceDefinitions) { + resultSb.WriteString(RenderInlineHTML(inline)) + } + if !isTightList { + resultSb.WriteString("

") + } + case *List: + if v.IsOrdered { + if v.OrderedStart != 1 { + resultSb.WriteString(fmt.Sprintf(`
    `, v.OrderedStart)) + } else { + resultSb.WriteString("
      ") + } + } else { + resultSb.WriteString("
        ") + } + for _, block := range v.Children { + resultSb.WriteString(renderBlockHTML(block, referenceDefinitions, !v.IsLoose)) + } + if v.IsOrdered { + resultSb.WriteString("
    ") + } else { + resultSb.WriteString("") + } + case *ListItem: + resultSb.WriteString("
  1. ") + for _, block := range v.Children { + resultSb.WriteString(renderBlockHTML(block, referenceDefinitions, isTightList)) + } + resultSb.WriteString("
  2. ") + case *BlockQuote: + resultSb.WriteString("
    ") + for _, block := range v.Children { + resultSb.WriteString(RenderBlockHTML(block, referenceDefinitions)) + } + resultSb.WriteString("
    ") + case *FencedCode: + if info := v.Info(); info != "" { + language := strings.Fields(info)[0] + resultSb.WriteString(`
    `)
    +		} else {
    +			resultSb.WriteString("
    ")
    +		}
    +		resultSb.WriteString(htmlEscaper.Replace(v.Code()))
    +		resultSb.WriteString("
    ") + case *IndentedCode: + resultSb.WriteString("
    ")
    +		resultSb.WriteString(htmlEscaper.Replace(v.Code()))
    +		resultSb.WriteString("
    ") + default: + panic(fmt.Sprintf("missing case for type %T", v)) + } + + return resultSb.String() +} + +func escapeURL(url string) (result string) { + for i := 0; i < len(url); { + switch b := url[i]; b { + case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '-', '_', '.', '!', '~', '*', '\'', '(', ')', '#': + result += string(b) + i++ + default: + if b == '%' && i+2 < len(url) && isHexByte(url[i+1]) && isHexByte(url[i+2]) { + result += url[i : i+3] + i += 3 + } else if (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9') { + result += string(b) + i++ + } else { + result += fmt.Sprintf("%%%0X", b) + i++ + } + } + } + return +} + +func RenderInlineHTML(inline Inline) (result string) { + switch v := inline.(type) { + case *Text: + return htmlEscaper.Replace(v.Text) + case *HardLineBreak: + return "
    " + case *SoftLineBreak: + return "\n" + case *CodeSpan: + return "" + htmlEscaper.Replace(v.Code) + "" + case *InlineImage: + result += `` + htmlEscaper.Replace(renderImageAltText(v.Children)) + `` + case *ReferenceImage: + result += `` + htmlEscaper.Replace(renderImageAltText(v.Children)) + `` + case *InlineLink: + var resultSb strings.Builder + resultSb.WriteString(``) + for _, inline := range v.Children { + resultSb.WriteString(RenderInlineHTML(inline)) + } + resultSb.WriteString("") + return resultSb.String() + case *ReferenceLink: + var resultSb strings.Builder + resultSb.WriteString(``) + for _, inline := range v.Children { + resultSb.WriteString(RenderInlineHTML(inline)) + } + resultSb.WriteString("") + return resultSb.String() + case *Autolink: + var resultSb strings.Builder + resultSb.WriteString(``) + for _, inline := range v.Children { + resultSb.WriteString(RenderInlineHTML(inline)) + } + resultSb.WriteString("") + return resultSb.String() + case *Emoji: + escapedName := htmlEscaper.Replace(v.Name) + result += fmt.Sprintf(``, escapedName, escapedName) + + default: + panic(fmt.Sprintf("missing case for type %T", v)) + } + return +} + +func renderImageAltText(children []Inline) string { + var resultSb strings.Builder + for _, inline := range children { + resultSb.WriteString(renderImageChildAltText(inline)) + } + return resultSb.String() +} + +func renderImageChildAltText(inline Inline) string { + switch v := inline.(type) { + case *Text: + return v.Text + case *InlineImage: + var resultSb strings.Builder + for _, inline := range v.Children { + resultSb.WriteString(renderImageChildAltText(inline)) + } + return resultSb.String() + case *InlineLink: + var resultSb strings.Builder + for _, inline := range v.Children { + resultSb.WriteString(renderImageChildAltText(inline)) + } + return resultSb.String() + } + return "" +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/html_entities.go 
b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/html_entities.go new file mode 100644 index 00000000..24c69a50 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/html_entities.go @@ -0,0 +1,2133 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +//nolint:misspell +package markdown + +var htmlEntities = map[string]string{ + "AElig": "\u00C6", + "AMP": "\u0026", + "Aacute": "\u00C1", + "Abreve": "\u0102", + "Acirc": "\u00C2", + "Acy": "\u0410", + "Afr": "\U0001D504", + "Agrave": "\u00C0", + "Alpha": "\u0391", + "Amacr": "\u0100", + "And": "\u2A53", + "Aogon": "\u0104", + "Aopf": "\U0001D538", + "ApplyFunction": "\u2061", + "Aring": "\u00C5", + "Ascr": "\U0001D49C", + "Assign": "\u2254", + "Atilde": "\u00C3", + "Auml": "\u00C4", + "Backslash": "\u2216", + "Barv": "\u2AE7", + "Barwed": "\u2306", + "Bcy": "\u0411", + "Because": "\u2235", + "Bernoullis": "\u212C", + "Beta": "\u0392", + "Bfr": "\U0001D505", + "Bopf": "\U0001D539", + "Breve": "\u02D8", + "Bscr": "\u212C", + "Bumpeq": "\u224E", + "CHcy": "\u0427", + "COPY": "\u00A9", + "Cacute": "\u0106", + "Cap": "\u22D2", + "CapitalDifferentialD": "\u2145", + "Cayleys": "\u212D", + "Ccaron": "\u010C", + "Ccedil": "\u00C7", + "Ccirc": "\u0108", + "Cconint": "\u2230", + "Cdot": "\u010A", + "Cedilla": "\u00B8", + "CenterDot": "\u00B7", + "Cfr": "\u212D", + "Chi": "\u03A7", + "CircleDot": "\u2299", + "CircleMinus": "\u2296", + "CirclePlus": "\u2295", + "CircleTimes": "\u2297", + "ClockwiseContourIntegral": "\u2232", + "CloseCurlyDoubleQuote": "\u201D", + "CloseCurlyQuote": "\u2019", + "Colon": "\u2237", + "Colone": "\u2A74", + "Congruent": "\u2261", + "Conint": "\u222F", + "ContourIntegral": "\u222E", + "Copf": "\u2102", + "Coproduct": "\u2210", + "CounterClockwiseContourIntegral": "\u2233", + "Cross": "\u2A2F", + "Cscr": "\U0001D49E", + "Cup": "\u22D3", + "CupCap": "\u224D", + "DD": "\u2145", + 
"DDotrahd": "\u2911", + "DJcy": "\u0402", + "DScy": "\u0405", + "DZcy": "\u040F", + "Dagger": "\u2021", + "Darr": "\u21A1", + "Dashv": "\u2AE4", + "Dcaron": "\u010E", + "Dcy": "\u0414", + "Del": "\u2207", + "Delta": "\u0394", + "Dfr": "\U0001D507", + "DiacriticalAcute": "\u00B4", + "DiacriticalDot": "\u02D9", + "DiacriticalDoubleAcute": "\u02DD", + "DiacriticalGrave": "\u0060", + "DiacriticalTilde": "\u02DC", + "Diamond": "\u22C4", + "DifferentialD": "\u2146", + "Dopf": "\U0001D53B", + "Dot": "\u00A8", + "DotDot": "\u20DC", + "DotEqual": "\u2250", + "DoubleContourIntegral": "\u222F", + "DoubleDot": "\u00A8", + "DoubleDownArrow": "\u21D3", + "DoubleLeftArrow": "\u21D0", + "DoubleLeftRightArrow": "\u21D4", + "DoubleLeftTee": "\u2AE4", + "DoubleLongLeftArrow": "\u27F8", + "DoubleLongLeftRightArrow": "\u27FA", + "DoubleLongRightArrow": "\u27F9", + "DoubleRightArrow": "\u21D2", + "DoubleRightTee": "\u22A8", + "DoubleUpArrow": "\u21D1", + "DoubleUpDownArrow": "\u21D5", + "DoubleVerticalBar": "\u2225", + "DownArrow": "\u2193", + "DownArrowBar": "\u2913", + "DownArrowUpArrow": "\u21F5", + "DownBreve": "\u0311", + "DownLeftRightVector": "\u2950", + "DownLeftTeeVector": "\u295E", + "DownLeftVector": "\u21BD", + "DownLeftVectorBar": "\u2956", + "DownRightTeeVector": "\u295F", + "DownRightVector": "\u21C1", + "DownRightVectorBar": "\u2957", + "DownTee": "\u22A4", + "DownTeeArrow": "\u21A7", + "Downarrow": "\u21D3", + "Dscr": "\U0001D49F", + "Dstrok": "\u0110", + "ENG": "\u014A", + "ETH": "\u00D0", + "Eacute": "\u00C9", + "Ecaron": "\u011A", + "Ecirc": "\u00CA", + "Ecy": "\u042D", + "Edot": "\u0116", + "Efr": "\U0001D508", + "Egrave": "\u00C8", + "Element": "\u2208", + "Emacr": "\u0112", + "EmptySmallSquare": "\u25FB", + "EmptyVerySmallSquare": "\u25AB", + "Eogon": "\u0118", + "Eopf": "\U0001D53C", + "Epsilon": "\u0395", + "Equal": "\u2A75", + "EqualTilde": "\u2242", + "Equilibrium": "\u21CC", + "Escr": "\u2130", + "Esim": "\u2A73", + "Eta": "\u0397", + "Euml": "\u00CB", + 
"Exists": "\u2203", + "ExponentialE": "\u2147", + "Fcy": "\u0424", + "Ffr": "\U0001D509", + "FilledSmallSquare": "\u25FC", + "FilledVerySmallSquare": "\u25AA", + "Fopf": "\U0001D53D", + "ForAll": "\u2200", + "Fouriertrf": "\u2131", + "Fscr": "\u2131", + "GJcy": "\u0403", + "GT": "\u003E", + "Gamma": "\u0393", + "Gammad": "\u03DC", + "Gbreve": "\u011E", + "Gcedil": "\u0122", + "Gcirc": "\u011C", + "Gcy": "\u0413", + "Gdot": "\u0120", + "Gfr": "\U0001D50A", + "Gg": "\u22D9", + "Gopf": "\U0001D53E", + "GreaterEqual": "\u2265", + "GreaterEqualLess": "\u22DB", + "GreaterFullEqual": "\u2267", + "GreaterGreater": "\u2AA2", + "GreaterLess": "\u2277", + "GreaterSlantEqual": "\u2A7E", + "GreaterTilde": "\u2273", + "Gscr": "\U0001D4A2", + "Gt": "\u226B", + "HARDcy": "\u042A", + "Hacek": "\u02C7", + "Hat": "\u005E", + "Hcirc": "\u0124", + "Hfr": "\u210C", + "HilbertSpace": "\u210B", + "Hopf": "\u210D", + "HorizontalLine": "\u2500", + "Hscr": "\u210B", + "Hstrok": "\u0126", + "HumpDownHump": "\u224E", + "HumpEqual": "\u224F", + "IEcy": "\u0415", + "IJlig": "\u0132", + "IOcy": "\u0401", + "Iacute": "\u00CD", + "Icirc": "\u00CE", + "Icy": "\u0418", + "Idot": "\u0130", + "Ifr": "\u2111", + "Igrave": "\u00CC", + "Im": "\u2111", + "Imacr": "\u012A", + "ImaginaryI": "\u2148", + "Implies": "\u21D2", + "Int": "\u222C", + "Integral": "\u222B", + "Intersection": "\u22C2", + "InvisibleComma": "\u2063", + "InvisibleTimes": "\u2062", + "Iogon": "\u012E", + "Iopf": "\U0001D540", + "Iota": "\u0399", + "Iscr": "\u2110", + "Itilde": "\u0128", + "Iukcy": "\u0406", + "Iuml": "\u00CF", + "Jcirc": "\u0134", + "Jcy": "\u0419", + "Jfr": "\U0001D50D", + "Jopf": "\U0001D541", + "Jscr": "\U0001D4A5", + "Jsercy": "\u0408", + "Jukcy": "\u0404", + "KHcy": "\u0425", + "KJcy": "\u040C", + "Kappa": "\u039A", + "Kcedil": "\u0136", + "Kcy": "\u041A", + "Kfr": "\U0001D50E", + "Kopf": "\U0001D542", + "Kscr": "\U0001D4A6", + "LJcy": "\u0409", + "LT": "\u003C", + "Lacute": "\u0139", + "Lambda": "\u039B", + "Lang": 
"\u27EA", + "Laplacetrf": "\u2112", + "Larr": "\u219E", + "Lcaron": "\u013D", + "Lcedil": "\u013B", + "Lcy": "\u041B", + "LeftAngleBracket": "\u27E8", + "LeftArrow": "\u2190", + "LeftArrowBar": "\u21E4", + "LeftArrowRightArrow": "\u21C6", + "LeftCeiling": "\u2308", + "LeftDoubleBracket": "\u27E6", + "LeftDownTeeVector": "\u2961", + "LeftDownVector": "\u21C3", + "LeftDownVectorBar": "\u2959", + "LeftFloor": "\u230A", + "LeftRightArrow": "\u2194", + "LeftRightVector": "\u294E", + "LeftTee": "\u22A3", + "LeftTeeArrow": "\u21A4", + "LeftTeeVector": "\u295A", + "LeftTriangle": "\u22B2", + "LeftTriangleBar": "\u29CF", + "LeftTriangleEqual": "\u22B4", + "LeftUpDownVector": "\u2951", + "LeftUpTeeVector": "\u2960", + "LeftUpVector": "\u21BF", + "LeftUpVectorBar": "\u2958", + "LeftVector": "\u21BC", + "LeftVectorBar": "\u2952", + "Leftarrow": "\u21D0", + "Leftrightarrow": "\u21D4", + "LessEqualGreater": "\u22DA", + "LessFullEqual": "\u2266", + "LessGreater": "\u2276", + "LessLess": "\u2AA1", + "LessSlantEqual": "\u2A7D", + "LessTilde": "\u2272", + "Lfr": "\U0001D50F", + "Ll": "\u22D8", + "Lleftarrow": "\u21DA", + "Lmidot": "\u013F", + "LongLeftArrow": "\u27F5", + "LongLeftRightArrow": "\u27F7", + "LongRightArrow": "\u27F6", + "Longleftarrow": "\u27F8", + "Longleftrightarrow": "\u27FA", + "Longrightarrow": "\u27F9", + "Lopf": "\U0001D543", + "LowerLeftArrow": "\u2199", + "LowerRightArrow": "\u2198", + "Lscr": "\u2112", + "Lsh": "\u21B0", + "Lstrok": "\u0141", + "Lt": "\u226A", + "Map": "\u2905", + "Mcy": "\u041C", + "MediumSpace": "\u205F", + "Mellintrf": "\u2133", + "Mfr": "\U0001D510", + "MinusPlus": "\u2213", + "Mopf": "\U0001D544", + "Mscr": "\u2133", + "Mu": "\u039C", + "NJcy": "\u040A", + "Nacute": "\u0143", + "Ncaron": "\u0147", + "Ncedil": "\u0145", + "Ncy": "\u041D", + "NegativeMediumSpace": "\u200B", + "NegativeThickSpace": "\u200B", + "NegativeThinSpace": "\u200B", + "NegativeVeryThinSpace": "\u200B", + "NestedGreaterGreater": "\u226B", + "NestedLessLess": 
"\u226A", + "NewLine": "\u000A", + "Nfr": "\U0001D511", + "NoBreak": "\u2060", + "NonBreakingSpace": "\u00A0", + "Nopf": "\u2115", + "Not": "\u2AEC", + "NotCongruent": "\u2262", + "NotCupCap": "\u226D", + "NotDoubleVerticalBar": "\u2226", + "NotElement": "\u2209", + "NotEqual": "\u2260", + "NotEqualTilde": "\u2242\u0338", + "NotExists": "\u2204", + "NotGreater": "\u226F", + "NotGreaterEqual": "\u2271", + "NotGreaterFullEqual": "\u2267\u0338", + "NotGreaterGreater": "\u226B\u0338", + "NotGreaterLess": "\u2279", + "NotGreaterSlantEqual": "\u2A7E\u0338", + "NotGreaterTilde": "\u2275", + "NotHumpDownHump": "\u224E\u0338", + "NotHumpEqual": "\u224F\u0338", + "NotLeftTriangle": "\u22EA", + "NotLeftTriangleBar": "\u29CF\u0338", + "NotLeftTriangleEqual": "\u22EC", + "NotLess": "\u226E", + "NotLessEqual": "\u2270", + "NotLessGreater": "\u2278", + "NotLessLess": "\u226A\u0338", + "NotLessSlantEqual": "\u2A7D\u0338", + "NotLessTilde": "\u2274", + "NotNestedGreaterGreater": "\u2AA2\u0338", + "NotNestedLessLess": "\u2AA1\u0338", + "NotPrecedes": "\u2280", + "NotPrecedesEqual": "\u2AAF\u0338", + "NotPrecedesSlantEqual": "\u22E0", + "NotReverseElement": "\u220C", + "NotRightTriangle": "\u22EB", + "NotRightTriangleBar": "\u29D0\u0338", + "NotRightTriangleEqual": "\u22ED", + "NotSquareSubset": "\u228F\u0338", + "NotSquareSubsetEqual": "\u22E2", + "NotSquareSuperset": "\u2290\u0338", + "NotSquareSupersetEqual": "\u22E3", + "NotSubset": "\u2282\u20D2", + "NotSubsetEqual": "\u2288", + "NotSucceeds": "\u2281", + "NotSucceedsEqual": "\u2AB0\u0338", + "NotSucceedsSlantEqual": "\u22E1", + "NotSucceedsTilde": "\u227F\u0338", + "NotSuperset": "\u2283\u20D2", + "NotSupersetEqual": "\u2289", + "NotTilde": "\u2241", + "NotTildeEqual": "\u2244", + "NotTildeFullEqual": "\u2247", + "NotTildeTilde": "\u2249", + "NotVerticalBar": "\u2224", + "Nscr": "\U0001D4A9", + "Ntilde": "\u00D1", + "Nu": "\u039D", + "OElig": "\u0152", + "Oacute": "\u00D3", + "Ocirc": "\u00D4", + "Ocy": "\u041E", + "Odblac": 
"\u0150", + "Ofr": "\U0001D512", + "Ograve": "\u00D2", + "Omacr": "\u014C", + "Omega": "\u03A9", + "Omicron": "\u039F", + "Oopf": "\U0001D546", + "OpenCurlyDoubleQuote": "\u201C", + "OpenCurlyQuote": "\u2018", + "Or": "\u2A54", + "Oscr": "\U0001D4AA", + "Oslash": "\u00D8", + "Otilde": "\u00D5", + "Otimes": "\u2A37", + "Ouml": "\u00D6", + "OverBar": "\u203E", + "OverBrace": "\u23DE", + "OverBracket": "\u23B4", + "OverParenthesis": "\u23DC", + "PartialD": "\u2202", + "Pcy": "\u041F", + "Pfr": "\U0001D513", + "Phi": "\u03A6", + "Pi": "\u03A0", + "PlusMinus": "\u00B1", + "Poincareplane": "\u210C", + "Popf": "\u2119", + "Pr": "\u2ABB", + "Precedes": "\u227A", + "PrecedesEqual": "\u2AAF", + "PrecedesSlantEqual": "\u227C", + "PrecedesTilde": "\u227E", + "Prime": "\u2033", + "Product": "\u220F", + "Proportion": "\u2237", + "Proportional": "\u221D", + "Pscr": "\U0001D4AB", + "Psi": "\u03A8", + "QUOT": "\u0022", + "Qfr": "\U0001D514", + "Qopf": "\u211A", + "Qscr": "\U0001D4AC", + "RBarr": "\u2910", + "REG": "\u00AE", + "Racute": "\u0154", + "Rang": "\u27EB", + "Rarr": "\u21A0", + "Rarrtl": "\u2916", + "Rcaron": "\u0158", + "Rcedil": "\u0156", + "Rcy": "\u0420", + "Re": "\u211C", + "ReverseElement": "\u220B", + "ReverseEquilibrium": "\u21CB", + "ReverseUpEquilibrium": "\u296F", + "Rfr": "\u211C", + "Rho": "\u03A1", + "RightAngleBracket": "\u27E9", + "RightArrow": "\u2192", + "RightArrowBar": "\u21E5", + "RightArrowLeftArrow": "\u21C4", + "RightCeiling": "\u2309", + "RightDoubleBracket": "\u27E7", + "RightDownTeeVector": "\u295D", + "RightDownVector": "\u21C2", + "RightDownVectorBar": "\u2955", + "RightFloor": "\u230B", + "RightTee": "\u22A2", + "RightTeeArrow": "\u21A6", + "RightTeeVector": "\u295B", + "RightTriangle": "\u22B3", + "RightTriangleBar": "\u29D0", + "RightTriangleEqual": "\u22B5", + "RightUpDownVector": "\u294F", + "RightUpTeeVector": "\u295C", + "RightUpVector": "\u21BE", + "RightUpVectorBar": "\u2954", + "RightVector": "\u21C0", + "RightVectorBar": "\u2953", + 
"Rightarrow": "\u21D2", + "Ropf": "\u211D", + "RoundImplies": "\u2970", + "Rrightarrow": "\u21DB", + "Rscr": "\u211B", + "Rsh": "\u21B1", + "RuleDelayed": "\u29F4", + "SHCHcy": "\u0429", + "SHcy": "\u0428", + "SOFTcy": "\u042C", + "Sacute": "\u015A", + "Sc": "\u2ABC", + "Scaron": "\u0160", + "Scedil": "\u015E", + "Scirc": "\u015C", + "Scy": "\u0421", + "Sfr": "\U0001D516", + "ShortDownArrow": "\u2193", + "ShortLeftArrow": "\u2190", + "ShortRightArrow": "\u2192", + "ShortUpArrow": "\u2191", + "Sigma": "\u03A3", + "SmallCircle": "\u2218", + "Sopf": "\U0001D54A", + "Sqrt": "\u221A", + "Square": "\u25A1", + "SquareIntersection": "\u2293", + "SquareSubset": "\u228F", + "SquareSubsetEqual": "\u2291", + "SquareSuperset": "\u2290", + "SquareSupersetEqual": "\u2292", + "SquareUnion": "\u2294", + "Sscr": "\U0001D4AE", + "Star": "\u22C6", + "Sub": "\u22D0", + "Subset": "\u22D0", + "SubsetEqual": "\u2286", + "Succeeds": "\u227B", + "SucceedsEqual": "\u2AB0", + "SucceedsSlantEqual": "\u227D", + "SucceedsTilde": "\u227F", + "SuchThat": "\u220B", + "Sum": "\u2211", + "Sup": "\u22D1", + "Superset": "\u2283", + "SupersetEqual": "\u2287", + "Supset": "\u22D1", + "THORN": "\u00DE", + "TRADE": "\u2122", + "TSHcy": "\u040B", + "TScy": "\u0426", + "Tab": "\u0009", + "Tau": "\u03A4", + "Tcaron": "\u0164", + "Tcedil": "\u0162", + "Tcy": "\u0422", + "Tfr": "\U0001D517", + "Therefore": "\u2234", + "Theta": "\u0398", + "ThickSpace": "\u205F\u200A", + "ThinSpace": "\u2009", + "Tilde": "\u223C", + "TildeEqual": "\u2243", + "TildeFullEqual": "\u2245", + "TildeTilde": "\u2248", + "Topf": "\U0001D54B", + "TripleDot": "\u20DB", + "Tscr": "\U0001D4AF", + "Tstrok": "\u0166", + "Uacute": "\u00DA", + "Uarr": "\u219F", + "Uarrocir": "\u2949", + "Ubrcy": "\u040E", + "Ubreve": "\u016C", + "Ucirc": "\u00DB", + "Ucy": "\u0423", + "Udblac": "\u0170", + "Ufr": "\U0001D518", + "Ugrave": "\u00D9", + "Umacr": "\u016A", + "UnderBar": "\u005F", + "UnderBrace": "\u23DF", + "UnderBracket": "\u23B5", + 
"UnderParenthesis": "\u23DD", + "Union": "\u22C3", + "UnionPlus": "\u228E", + "Uogon": "\u0172", + "Uopf": "\U0001D54C", + "UpArrow": "\u2191", + "UpArrowBar": "\u2912", + "UpArrowDownArrow": "\u21C5", + "UpDownArrow": "\u2195", + "UpEquilibrium": "\u296E", + "UpTee": "\u22A5", + "UpTeeArrow": "\u21A5", + "Uparrow": "\u21D1", + "Updownarrow": "\u21D5", + "UpperLeftArrow": "\u2196", + "UpperRightArrow": "\u2197", + "Upsi": "\u03D2", + "Upsilon": "\u03A5", + "Uring": "\u016E", + "Uscr": "\U0001D4B0", + "Utilde": "\u0168", + "Uuml": "\u00DC", + "VDash": "\u22AB", + "Vbar": "\u2AEB", + "Vcy": "\u0412", + "Vdash": "\u22A9", + "Vdashl": "\u2AE6", + "Vee": "\u22C1", + "Verbar": "\u2016", + "Vert": "\u2016", + "VerticalBar": "\u2223", + "VerticalLine": "\u007C", + "VerticalSeparator": "\u2758", + "VerticalTilde": "\u2240", + "VeryThinSpace": "\u200A", + "Vfr": "\U0001D519", + "Vopf": "\U0001D54D", + "Vscr": "\U0001D4B1", + "Vvdash": "\u22AA", + "Wcirc": "\u0174", + "Wedge": "\u22C0", + "Wfr": "\U0001D51A", + "Wopf": "\U0001D54E", + "Wscr": "\U0001D4B2", + "Xfr": "\U0001D51B", + "Xi": "\u039E", + "Xopf": "\U0001D54F", + "Xscr": "\U0001D4B3", + "YAcy": "\u042F", + "YIcy": "\u0407", + "YUcy": "\u042E", + "Yacute": "\u00DD", + "Ycirc": "\u0176", + "Ycy": "\u042B", + "Yfr": "\U0001D51C", + "Yopf": "\U0001D550", + "Yscr": "\U0001D4B4", + "Yuml": "\u0178", + "ZHcy": "\u0416", + "Zacute": "\u0179", + "Zcaron": "\u017D", + "Zcy": "\u0417", + "Zdot": "\u017B", + "ZeroWidthSpace": "\u200B", + "Zeta": "\u0396", + "Zfr": "\u2128", + "Zopf": "\u2124", + "Zscr": "\U0001D4B5", + "aacute": "\u00E1", + "abreve": "\u0103", + "ac": "\u223E", + "acE": "\u223E\u0333", + "acd": "\u223F", + "acirc": "\u00E2", + "acute": "\u00B4", + "acy": "\u0430", + "aelig": "\u00E6", + "af": "\u2061", + "afr": "\U0001D51E", + "agrave": "\u00E0", + "alefsym": "\u2135", + "aleph": "\u2135", + "alpha": "\u03B1", + "amacr": "\u0101", + "amalg": "\u2A3F", + "amp": "\u0026", + "and": "\u2227", + "andand": "\u2A55", + 
"andd": "\u2A5C", + "andslope": "\u2A58", + "andv": "\u2A5A", + "ang": "\u2220", + "ange": "\u29A4", + "angle": "\u2220", + "angmsd": "\u2221", + "angmsdaa": "\u29A8", + "angmsdab": "\u29A9", + "angmsdac": "\u29AA", + "angmsdad": "\u29AB", + "angmsdae": "\u29AC", + "angmsdaf": "\u29AD", + "angmsdag": "\u29AE", + "angmsdah": "\u29AF", + "angrt": "\u221F", + "angrtvb": "\u22BE", + "angrtvbd": "\u299D", + "angsph": "\u2222", + "angst": "\u00C5", + "angzarr": "\u237C", + "aogon": "\u0105", + "aopf": "\U0001D552", + "ap": "\u2248", + "apE": "\u2A70", + "apacir": "\u2A6F", + "ape": "\u224A", + "apid": "\u224B", + "apos": "\u0027", + "approx": "\u2248", + "approxeq": "\u224A", + "aring": "\u00E5", + "ascr": "\U0001D4B6", + "ast": "\u002A", + "asymp": "\u2248", + "asympeq": "\u224D", + "atilde": "\u00E3", + "auml": "\u00E4", + "awconint": "\u2233", + "awint": "\u2A11", + "bNot": "\u2AED", + "backcong": "\u224C", + "backepsilon": "\u03F6", + "backprime": "\u2035", + "backsim": "\u223D", + "backsimeq": "\u22CD", + "barvee": "\u22BD", + "barwed": "\u2305", + "barwedge": "\u2305", + "bbrk": "\u23B5", + "bbrktbrk": "\u23B6", + "bcong": "\u224C", + "bcy": "\u0431", + "bdquo": "\u201E", + "becaus": "\u2235", + "because": "\u2235", + "bemptyv": "\u29B0", + "bepsi": "\u03F6", + "bernou": "\u212C", + "beta": "\u03B2", + "beth": "\u2136", + "between": "\u226C", + "bfr": "\U0001D51F", + "bigcap": "\u22C2", + "bigcirc": "\u25EF", + "bigcup": "\u22C3", + "bigodot": "\u2A00", + "bigoplus": "\u2A01", + "bigotimes": "\u2A02", + "bigsqcup": "\u2A06", + "bigstar": "\u2605", + "bigtriangledown": "\u25BD", + "bigtriangleup": "\u25B3", + "biguplus": "\u2A04", + "bigvee": "\u22C1", + "bigwedge": "\u22C0", + "bkarow": "\u290D", + "blacklozenge": "\u29EB", + "blacksquare": "\u25AA", + "blacktriangle": "\u25B4", + "blacktriangledown": "\u25BE", + "blacktriangleleft": "\u25C2", + "blacktriangleright": "\u25B8", + "blank": "\u2423", + "blk12": "\u2592", + "blk14": "\u2591", + "blk34": "\u2593", + 
"block": "\u2588", + "bne": "\u003D\u20E5", + "bnequiv": "\u2261\u20E5", + "bnot": "\u2310", + "bopf": "\U0001D553", + "bot": "\u22A5", + "bottom": "\u22A5", + "bowtie": "\u22C8", + "boxDL": "\u2557", + "boxDR": "\u2554", + "boxDl": "\u2556", + "boxDr": "\u2553", + "boxH": "\u2550", + "boxHD": "\u2566", + "boxHU": "\u2569", + "boxHd": "\u2564", + "boxHu": "\u2567", + "boxUL": "\u255D", + "boxUR": "\u255A", + "boxUl": "\u255C", + "boxUr": "\u2559", + "boxV": "\u2551", + "boxVH": "\u256C", + "boxVL": "\u2563", + "boxVR": "\u2560", + "boxVh": "\u256B", + "boxVl": "\u2562", + "boxVr": "\u255F", + "boxbox": "\u29C9", + "boxdL": "\u2555", + "boxdR": "\u2552", + "boxdl": "\u2510", + "boxdr": "\u250C", + "boxh": "\u2500", + "boxhD": "\u2565", + "boxhU": "\u2568", + "boxhd": "\u252C", + "boxhu": "\u2534", + "boxminus": "\u229F", + "boxplus": "\u229E", + "boxtimes": "\u22A0", + "boxuL": "\u255B", + "boxuR": "\u2558", + "boxul": "\u2518", + "boxur": "\u2514", + "boxv": "\u2502", + "boxvH": "\u256A", + "boxvL": "\u2561", + "boxvR": "\u255E", + "boxvh": "\u253C", + "boxvl": "\u2524", + "boxvr": "\u251C", + "bprime": "\u2035", + "breve": "\u02D8", + "brvbar": "\u00A6", + "bscr": "\U0001D4B7", + "bsemi": "\u204F", + "bsim": "\u223D", + "bsime": "\u22CD", + "bsol": "\u005C", + "bsolb": "\u29C5", + "bsolhsub": "\u27C8", + "bull": "\u2022", + "bullet": "\u2022", + "bump": "\u224E", + "bumpE": "\u2AAE", + "bumpe": "\u224F", + "bumpeq": "\u224F", + "cacute": "\u0107", + "cap": "\u2229", + "capand": "\u2A44", + "capbrcup": "\u2A49", + "capcap": "\u2A4B", + "capcup": "\u2A47", + "capdot": "\u2A40", + "caps": "\u2229\uFE00", + "caret": "\u2041", + "caron": "\u02C7", + "ccaps": "\u2A4D", + "ccaron": "\u010D", + "ccedil": "\u00E7", + "ccirc": "\u0109", + "ccups": "\u2A4C", + "ccupssm": "\u2A50", + "cdot": "\u010B", + "cedil": "\u00B8", + "cemptyv": "\u29B2", + "cent": "\u00A2", + "centerdot": "\u00B7", + "cfr": "\U0001D520", + "chcy": "\u0447", + "check": "\u2713", + "checkmark": "\u2713", 
+ "chi": "\u03C7", + "cir": "\u25CB", + "cirE": "\u29C3", + "circ": "\u02C6", + "circeq": "\u2257", + "circlearrowleft": "\u21BA", + "circlearrowright": "\u21BB", + "circledR": "\u00AE", + "circledS": "\u24C8", + "circledast": "\u229B", + "circledcirc": "\u229A", + "circleddash": "\u229D", + "cire": "\u2257", + "cirfnint": "\u2A10", + "cirmid": "\u2AEF", + "cirscir": "\u29C2", + "clubs": "\u2663", + "clubsuit": "\u2663", + "colon": "\u003A", + "colone": "\u2254", + "coloneq": "\u2254", + "comma": "\u002C", + "commat": "\u0040", + "comp": "\u2201", + "compfn": "\u2218", + "complement": "\u2201", + "complexes": "\u2102", + "cong": "\u2245", + "congdot": "\u2A6D", + "conint": "\u222E", + "copf": "\U0001D554", + "coprod": "\u2210", + "copy": "\u00A9", + "copysr": "\u2117", + "crarr": "\u21B5", + "cross": "\u2717", + "cscr": "\U0001D4B8", + "csub": "\u2ACF", + "csube": "\u2AD1", + "csup": "\u2AD0", + "csupe": "\u2AD2", + "ctdot": "\u22EF", + "cudarrl": "\u2938", + "cudarrr": "\u2935", + "cuepr": "\u22DE", + "cuesc": "\u22DF", + "cularr": "\u21B6", + "cularrp": "\u293D", + "cup": "\u222A", + "cupbrcap": "\u2A48", + "cupcap": "\u2A46", + "cupcup": "\u2A4A", + "cupdot": "\u228D", + "cupor": "\u2A45", + "cups": "\u222A\uFE00", + "curarr": "\u21B7", + "curarrm": "\u293C", + "curlyeqprec": "\u22DE", + "curlyeqsucc": "\u22DF", + "curlyvee": "\u22CE", + "curlywedge": "\u22CF", + "curren": "\u00A4", + "curvearrowleft": "\u21B6", + "curvearrowright": "\u21B7", + "cuvee": "\u22CE", + "cuwed": "\u22CF", + "cwconint": "\u2232", + "cwint": "\u2231", + "cylcty": "\u232D", + "dArr": "\u21D3", + "dHar": "\u2965", + "dagger": "\u2020", + "daleth": "\u2138", + "darr": "\u2193", + "dash": "\u2010", + "dashv": "\u22A3", + "dbkarow": "\u290F", + "dblac": "\u02DD", + "dcaron": "\u010F", + "dcy": "\u0434", + "dd": "\u2146", + "ddagger": "\u2021", + "ddarr": "\u21CA", + "ddotseq": "\u2A77", + "deg": "\u00B0", + "delta": "\u03B4", + "demptyv": "\u29B1", + "dfisht": "\u297F", + "dfr": 
"\U0001D521", + "dharl": "\u21C3", + "dharr": "\u21C2", + "diam": "\u22C4", + "diamond": "\u22C4", + "diamondsuit": "\u2666", + "diams": "\u2666", + "die": "\u00A8", + "digamma": "\u03DD", + "disin": "\u22F2", + "div": "\u00F7", + "divide": "\u00F7", + "divideontimes": "\u22C7", + "divonx": "\u22C7", + "djcy": "\u0452", + "dlcorn": "\u231E", + "dlcrop": "\u230D", + "dollar": "\u0024", + "dopf": "\U0001D555", + "dot": "\u02D9", + "doteq": "\u2250", + "doteqdot": "\u2251", + "dotminus": "\u2238", + "dotplus": "\u2214", + "dotsquare": "\u22A1", + "doublebarwedge": "\u2306", + "downarrow": "\u2193", + "downdownarrows": "\u21CA", + "downharpoonleft": "\u21C3", + "downharpoonright": "\u21C2", + "drbkarow": "\u2910", + "drcorn": "\u231F", + "drcrop": "\u230C", + "dscr": "\U0001D4B9", + "dscy": "\u0455", + "dsol": "\u29F6", + "dstrok": "\u0111", + "dtdot": "\u22F1", + "dtri": "\u25BF", + "dtrif": "\u25BE", + "duarr": "\u21F5", + "duhar": "\u296F", + "dwangle": "\u29A6", + "dzcy": "\u045F", + "dzigrarr": "\u27FF", + "eDDot": "\u2A77", + "eDot": "\u2251", + "eacute": "\u00E9", + "easter": "\u2A6E", + "ecaron": "\u011B", + "ecir": "\u2256", + "ecirc": "\u00EA", + "ecolon": "\u2255", + "ecy": "\u044D", + "edot": "\u0117", + "ee": "\u2147", + "efDot": "\u2252", + "efr": "\U0001D522", + "eg": "\u2A9A", + "egrave": "\u00E8", + "egs": "\u2A96", + "egsdot": "\u2A98", + "el": "\u2A99", + "elinters": "\u23E7", + "ell": "\u2113", + "els": "\u2A95", + "elsdot": "\u2A97", + "emacr": "\u0113", + "empty": "\u2205", + "emptyset": "\u2205", + "emptyv": "\u2205", + "emsp": "\u2003", + "emsp13": "\u2004", + "emsp14": "\u2005", + "eng": "\u014B", + "ensp": "\u2002", + "eogon": "\u0119", + "eopf": "\U0001D556", + "epar": "\u22D5", + "eparsl": "\u29E3", + "eplus": "\u2A71", + "epsi": "\u03B5", + "epsilon": "\u03B5", + "epsiv": "\u03F5", + "eqcirc": "\u2256", + "eqcolon": "\u2255", + "eqsim": "\u2242", + "eqslantgtr": "\u2A96", + "eqslantless": "\u2A95", + "equals": "\u003D", + "equest": 
"\u225F", + "equiv": "\u2261", + "equivDD": "\u2A78", + "eqvparsl": "\u29E5", + "erDot": "\u2253", + "erarr": "\u2971", + "escr": "\u212F", + "esdot": "\u2250", + "esim": "\u2242", + "eta": "\u03B7", + "eth": "\u00F0", + "euml": "\u00EB", + "euro": "\u20AC", + "excl": "\u0021", + "exist": "\u2203", + "expectation": "\u2130", + "exponentiale": "\u2147", + "fallingdotseq": "\u2252", + "fcy": "\u0444", + "female": "\u2640", + "ffilig": "\uFB03", + "fflig": "\uFB00", + "ffllig": "\uFB04", + "ffr": "\U0001D523", + "filig": "\uFB01", + "fjlig": "\u0066\u006A", + "flat": "\u266D", + "fllig": "\uFB02", + "fltns": "\u25B1", + "fnof": "\u0192", + "fopf": "\U0001D557", + "forall": "\u2200", + "fork": "\u22D4", + "forkv": "\u2AD9", + "fpartint": "\u2A0D", + "frac12": "\u00BD", + "frac13": "\u2153", + "frac14": "\u00BC", + "frac15": "\u2155", + "frac16": "\u2159", + "frac18": "\u215B", + "frac23": "\u2154", + "frac25": "\u2156", + "frac34": "\u00BE", + "frac35": "\u2157", + "frac38": "\u215C", + "frac45": "\u2158", + "frac56": "\u215A", + "frac58": "\u215D", + "frac78": "\u215E", + "frasl": "\u2044", + "frown": "\u2322", + "fscr": "\U0001D4BB", + "gE": "\u2267", + "gEl": "\u2A8C", + "gacute": "\u01F5", + "gamma": "\u03B3", + "gammad": "\u03DD", + "gap": "\u2A86", + "gbreve": "\u011F", + "gcirc": "\u011D", + "gcy": "\u0433", + "gdot": "\u0121", + "ge": "\u2265", + "gel": "\u22DB", + "geq": "\u2265", + "geqq": "\u2267", + "geqslant": "\u2A7E", + "ges": "\u2A7E", + "gescc": "\u2AA9", + "gesdot": "\u2A80", + "gesdoto": "\u2A82", + "gesdotol": "\u2A84", + "gesl": "\u22DB\uFE00", + "gesles": "\u2A94", + "gfr": "\U0001D524", + "gg": "\u226B", + "ggg": "\u22D9", + "gimel": "\u2137", + "gjcy": "\u0453", + "gl": "\u2277", + "glE": "\u2A92", + "gla": "\u2AA5", + "glj": "\u2AA4", + "gnE": "\u2269", + "gnap": "\u2A8A", + "gnapprox": "\u2A8A", + "gne": "\u2A88", + "gneq": "\u2A88", + "gneqq": "\u2269", + "gnsim": "\u22E7", + "gopf": "\U0001D558", + "grave": "\u0060", + "gscr": "\u210A", + 
"gsim": "\u2273", + "gsime": "\u2A8E", + "gsiml": "\u2A90", + "gt": "\u003E", + "gtcc": "\u2AA7", + "gtcir": "\u2A7A", + "gtdot": "\u22D7", + "gtlPar": "\u2995", + "gtquest": "\u2A7C", + "gtrapprox": "\u2A86", + "gtrarr": "\u2978", + "gtrdot": "\u22D7", + "gtreqless": "\u22DB", + "gtreqqless": "\u2A8C", + "gtrless": "\u2277", + "gtrsim": "\u2273", + "gvertneqq": "\u2269\uFE00", + "gvnE": "\u2269\uFE00", + "hArr": "\u21D4", + "hairsp": "\u200A", + "half": "\u00BD", + "hamilt": "\u210B", + "hardcy": "\u044A", + "harr": "\u2194", + "harrcir": "\u2948", + "harrw": "\u21AD", + "hbar": "\u210F", + "hcirc": "\u0125", + "hearts": "\u2665", + "heartsuit": "\u2665", + "hellip": "\u2026", + "hercon": "\u22B9", + "hfr": "\U0001D525", + "hksearow": "\u2925", + "hkswarow": "\u2926", + "hoarr": "\u21FF", + "homtht": "\u223B", + "hookleftarrow": "\u21A9", + "hookrightarrow": "\u21AA", + "hopf": "\U0001D559", + "horbar": "\u2015", + "hscr": "\U0001D4BD", + "hslash": "\u210F", + "hstrok": "\u0127", + "hybull": "\u2043", + "hyphen": "\u2010", + "iacute": "\u00ED", + "ic": "\u2063", + "icirc": "\u00EE", + "icy": "\u0438", + "iecy": "\u0435", + "iexcl": "\u00A1", + "iff": "\u21D4", + "ifr": "\U0001D526", + "igrave": "\u00EC", + "ii": "\u2148", + "iiiint": "\u2A0C", + "iiint": "\u222D", + "iinfin": "\u29DC", + "iiota": "\u2129", + "ijlig": "\u0133", + "imacr": "\u012B", + "image": "\u2111", + "imagline": "\u2110", + "imagpart": "\u2111", + "imath": "\u0131", + "imof": "\u22B7", + "imped": "\u01B5", + "in": "\u2208", + "incare": "\u2105", + "infin": "\u221E", + "infintie": "\u29DD", + "inodot": "\u0131", + "int": "\u222B", + "intcal": "\u22BA", + "integers": "\u2124", + "intercal": "\u22BA", + "intlarhk": "\u2A17", + "intprod": "\u2A3C", + "iocy": "\u0451", + "iogon": "\u012F", + "iopf": "\U0001D55A", + "iota": "\u03B9", + "iprod": "\u2A3C", + "iquest": "\u00BF", + "iscr": "\U0001D4BE", + "isin": "\u2208", + "isinE": "\u22F9", + "isindot": "\u22F5", + "isins": "\u22F4", + "isinsv": 
"\u22F3", + "isinv": "\u2208", + "it": "\u2062", + "itilde": "\u0129", + "iukcy": "\u0456", + "iuml": "\u00EF", + "jcirc": "\u0135", + "jcy": "\u0439", + "jfr": "\U0001D527", + "jmath": "\u0237", + "jopf": "\U0001D55B", + "jscr": "\U0001D4BF", + "jsercy": "\u0458", + "jukcy": "\u0454", + "kappa": "\u03BA", + "kappav": "\u03F0", + "kcedil": "\u0137", + "kcy": "\u043A", + "kfr": "\U0001D528", + "kgreen": "\u0138", + "khcy": "\u0445", + "kjcy": "\u045C", + "kopf": "\U0001D55C", + "kscr": "\U0001D4C0", + "lAarr": "\u21DA", + "lArr": "\u21D0", + "lAtail": "\u291B", + "lBarr": "\u290E", + "lE": "\u2266", + "lEg": "\u2A8B", + "lHar": "\u2962", + "lacute": "\u013A", + "laemptyv": "\u29B4", + "lagran": "\u2112", + "lambda": "\u03BB", + "lang": "\u27E8", + "langd": "\u2991", + "langle": "\u27E8", + "lap": "\u2A85", + "laquo": "\u00AB", + "larr": "\u2190", + "larrb": "\u21E4", + "larrbfs": "\u291F", + "larrfs": "\u291D", + "larrhk": "\u21A9", + "larrlp": "\u21AB", + "larrpl": "\u2939", + "larrsim": "\u2973", + "larrtl": "\u21A2", + "lat": "\u2AAB", + "latail": "\u2919", + "late": "\u2AAD", + "lates": "\u2AAD\uFE00", + "lbarr": "\u290C", + "lbbrk": "\u2772", + "lbrace": "\u007B", + "lbrack": "\u005B", + "lbrke": "\u298B", + "lbrksld": "\u298F", + "lbrkslu": "\u298D", + "lcaron": "\u013E", + "lcedil": "\u013C", + "lceil": "\u2308", + "lcub": "\u007B", + "lcy": "\u043B", + "ldca": "\u2936", + "ldquo": "\u201C", + "ldquor": "\u201E", + "ldrdhar": "\u2967", + "ldrushar": "\u294B", + "ldsh": "\u21B2", + "le": "\u2264", + "leftarrow": "\u2190", + "leftarrowtail": "\u21A2", + "leftharpoondown": "\u21BD", + "leftharpoonup": "\u21BC", + "leftleftarrows": "\u21C7", + "leftrightarrow": "\u2194", + "leftrightarrows": "\u21C6", + "leftrightharpoons": "\u21CB", + "leftrightsquigarrow": "\u21AD", + "leftthreetimes": "\u22CB", + "leg": "\u22DA", + "leq": "\u2264", + "leqq": "\u2266", + "leqslant": "\u2A7D", + "les": "\u2A7D", + "lescc": "\u2AA8", + "lesdot": "\u2A7F", + "lesdoto": "\u2A81", + 
"lesdotor": "\u2A83", + "lesg": "\u22DA\uFE00", + "lesges": "\u2A93", + "lessapprox": "\u2A85", + "lessdot": "\u22D6", + "lesseqgtr": "\u22DA", + "lesseqqgtr": "\u2A8B", + "lessgtr": "\u2276", + "lesssim": "\u2272", + "lfisht": "\u297C", + "lfloor": "\u230A", + "lfr": "\U0001D529", + "lg": "\u2276", + "lgE": "\u2A91", + "lhard": "\u21BD", + "lharu": "\u21BC", + "lharul": "\u296A", + "lhblk": "\u2584", + "ljcy": "\u0459", + "ll": "\u226A", + "llarr": "\u21C7", + "llcorner": "\u231E", + "llhard": "\u296B", + "lltri": "\u25FA", + "lmidot": "\u0140", + "lmoust": "\u23B0", + "lmoustache": "\u23B0", + "lnE": "\u2268", + "lnap": "\u2A89", + "lnapprox": "\u2A89", + "lne": "\u2A87", + "lneq": "\u2A87", + "lneqq": "\u2268", + "lnsim": "\u22E6", + "loang": "\u27EC", + "loarr": "\u21FD", + "lobrk": "\u27E6", + "longleftarrow": "\u27F5", + "longleftrightarrow": "\u27F7", + "longmapsto": "\u27FC", + "longrightarrow": "\u27F6", + "looparrowleft": "\u21AB", + "looparrowright": "\u21AC", + "lopar": "\u2985", + "lopf": "\U0001D55D", + "loplus": "\u2A2D", + "lotimes": "\u2A34", + "lowast": "\u2217", + "lowbar": "\u005F", + "loz": "\u25CA", + "lozenge": "\u25CA", + "lozf": "\u29EB", + "lpar": "\u0028", + "lparlt": "\u2993", + "lrarr": "\u21C6", + "lrcorner": "\u231F", + "lrhar": "\u21CB", + "lrhard": "\u296D", + "lrm": "\u200E", + "lrtri": "\u22BF", + "lsaquo": "\u2039", + "lscr": "\U0001D4C1", + "lsh": "\u21B0", + "lsim": "\u2272", + "lsime": "\u2A8D", + "lsimg": "\u2A8F", + "lsqb": "\u005B", + "lsquo": "\u2018", + "lsquor": "\u201A", + "lstrok": "\u0142", + "lt": "\u003C", + "ltcc": "\u2AA6", + "ltcir": "\u2A79", + "ltdot": "\u22D6", + "lthree": "\u22CB", + "ltimes": "\u22C9", + "ltlarr": "\u2976", + "ltquest": "\u2A7B", + "ltrPar": "\u2996", + "ltri": "\u25C3", + "ltrie": "\u22B4", + "ltrif": "\u25C2", + "lurdshar": "\u294A", + "luruhar": "\u2966", + "lvertneqq": "\u2268\uFE00", + "lvnE": "\u2268\uFE00", + "mDDot": "\u223A", + "macr": "\u00AF", + "male": "\u2642", + "malt": 
"\u2720", + "maltese": "\u2720", + "map": "\u21A6", + "mapsto": "\u21A6", + "mapstodown": "\u21A7", + "mapstoleft": "\u21A4", + "mapstoup": "\u21A5", + "marker": "\u25AE", + "mcomma": "\u2A29", + "mcy": "\u043C", + "mdash": "\u2014", + "measuredangle": "\u2221", + "mfr": "\U0001D52A", + "mho": "\u2127", + "micro": "\u00B5", + "mid": "\u2223", + "midast": "\u002A", + "midcir": "\u2AF0", + "middot": "\u00B7", + "minus": "\u2212", + "minusb": "\u229F", + "minusd": "\u2238", + "minusdu": "\u2A2A", + "mlcp": "\u2ADB", + "mldr": "\u2026", + "mnplus": "\u2213", + "models": "\u22A7", + "mopf": "\U0001D55E", + "mp": "\u2213", + "mscr": "\U0001D4C2", + "mstpos": "\u223E", + "mu": "\u03BC", + "multimap": "\u22B8", + "mumap": "\u22B8", + "nGg": "\u22D9\u0338", + "nGt": "\u226B\u20D2", + "nGtv": "\u226B\u0338", + "nLeftarrow": "\u21CD", + "nLeftrightarrow": "\u21CE", + "nLl": "\u22D8\u0338", + "nLt": "\u226A\u20D2", + "nLtv": "\u226A\u0338", + "nRightarrow": "\u21CF", + "nVDash": "\u22AF", + "nVdash": "\u22AE", + "nabla": "\u2207", + "nacute": "\u0144", + "nang": "\u2220\u20D2", + "nap": "\u2249", + "napE": "\u2A70\u0338", + "napid": "\u224B\u0338", + "napos": "\u0149", + "napprox": "\u2249", + "natur": "\u266E", + "natural": "\u266E", + "naturals": "\u2115", + "nbsp": "\u00A0", + "nbump": "\u224E\u0338", + "nbumpe": "\u224F\u0338", + "ncap": "\u2A43", + "ncaron": "\u0148", + "ncedil": "\u0146", + "ncong": "\u2247", + "ncongdot": "\u2A6D\u0338", + "ncup": "\u2A42", + "ncy": "\u043D", + "ndash": "\u2013", + "ne": "\u2260", + "neArr": "\u21D7", + "nearhk": "\u2924", + "nearr": "\u2197", + "nearrow": "\u2197", + "nedot": "\u2250\u0338", + "nequiv": "\u2262", + "nesear": "\u2928", + "nesim": "\u2242\u0338", + "nexist": "\u2204", + "nexists": "\u2204", + "nfr": "\U0001D52B", + "ngE": "\u2267\u0338", + "nge": "\u2271", + "ngeq": "\u2271", + "ngeqq": "\u2267\u0338", + "ngeqslant": "\u2A7E\u0338", + "nges": "\u2A7E\u0338", + "ngsim": "\u2275", + "ngt": "\u226F", + "ngtr": "\u226F", + 
"nhArr": "\u21CE", + "nharr": "\u21AE", + "nhpar": "\u2AF2", + "ni": "\u220B", + "nis": "\u22FC", + "nisd": "\u22FA", + "niv": "\u220B", + "njcy": "\u045A", + "nlArr": "\u21CD", + "nlE": "\u2266\u0338", + "nlarr": "\u219A", + "nldr": "\u2025", + "nle": "\u2270", + "nleftarrow": "\u219A", + "nleftrightarrow": "\u21AE", + "nleq": "\u2270", + "nleqq": "\u2266\u0338", + "nleqslant": "\u2A7D\u0338", + "nles": "\u2A7D\u0338", + "nless": "\u226E", + "nlsim": "\u2274", + "nlt": "\u226E", + "nltri": "\u22EA", + "nltrie": "\u22EC", + "nmid": "\u2224", + "nopf": "\U0001D55F", + "not": "\u00AC", + "notin": "\u2209", + "notinE": "\u22F9\u0338", + "notindot": "\u22F5\u0338", + "notinva": "\u2209", + "notinvb": "\u22F7", + "notinvc": "\u22F6", + "notni": "\u220C", + "notniva": "\u220C", + "notnivb": "\u22FE", + "notnivc": "\u22FD", + "npar": "\u2226", + "nparallel": "\u2226", + "nparsl": "\u2AFD\u20E5", + "npart": "\u2202\u0338", + "npolint": "\u2A14", + "npr": "\u2280", + "nprcue": "\u22E0", + "npre": "\u2AAF\u0338", + "nprec": "\u2280", + "npreceq": "\u2AAF\u0338", + "nrArr": "\u21CF", + "nrarr": "\u219B", + "nrarrc": "\u2933\u0338", + "nrarrw": "\u219D\u0338", + "nrightarrow": "\u219B", + "nrtri": "\u22EB", + "nrtrie": "\u22ED", + "nsc": "\u2281", + "nsccue": "\u22E1", + "nsce": "\u2AB0\u0338", + "nscr": "\U0001D4C3", + "nshortmid": "\u2224", + "nshortparallel": "\u2226", + "nsim": "\u2241", + "nsime": "\u2244", + "nsimeq": "\u2244", + "nsmid": "\u2224", + "nspar": "\u2226", + "nsqsube": "\u22E2", + "nsqsupe": "\u22E3", + "nsub": "\u2284", + "nsubE": "\u2AC5\u0338", + "nsube": "\u2288", + "nsubset": "\u2282\u20D2", + "nsubseteq": "\u2288", + "nsubseteqq": "\u2AC5\u0338", + "nsucc": "\u2281", + "nsucceq": "\u2AB0\u0338", + "nsup": "\u2285", + "nsupE": "\u2AC6\u0338", + "nsupe": "\u2289", + "nsupset": "\u2283\u20D2", + "nsupseteq": "\u2289", + "nsupseteqq": "\u2AC6\u0338", + "ntgl": "\u2279", + "ntilde": "\u00F1", + "ntlg": "\u2278", + "ntriangleleft": "\u22EA", + 
"ntrianglelefteq": "\u22EC", + "ntriangleright": "\u22EB", + "ntrianglerighteq": "\u22ED", + "nu": "\u03BD", + "num": "\u0023", + "numero": "\u2116", + "numsp": "\u2007", + "nvDash": "\u22AD", + "nvHarr": "\u2904", + "nvap": "\u224D\u20D2", + "nvdash": "\u22AC", + "nvge": "\u2265\u20D2", + "nvgt": "\u003E\u20D2", + "nvinfin": "\u29DE", + "nvlArr": "\u2902", + "nvle": "\u2264\u20D2", + "nvlt": "\u003C\u20D2", + "nvltrie": "\u22B4\u20D2", + "nvrArr": "\u2903", + "nvrtrie": "\u22B5\u20D2", + "nvsim": "\u223C\u20D2", + "nwArr": "\u21D6", + "nwarhk": "\u2923", + "nwarr": "\u2196", + "nwarrow": "\u2196", + "nwnear": "\u2927", + "oS": "\u24C8", + "oacute": "\u00F3", + "oast": "\u229B", + "ocir": "\u229A", + "ocirc": "\u00F4", + "ocy": "\u043E", + "odash": "\u229D", + "odblac": "\u0151", + "odiv": "\u2A38", + "odot": "\u2299", + "odsold": "\u29BC", + "oelig": "\u0153", + "ofcir": "\u29BF", + "ofr": "\U0001D52C", + "ogon": "\u02DB", + "ograve": "\u00F2", + "ogt": "\u29C1", + "ohbar": "\u29B5", + "ohm": "\u03A9", + "oint": "\u222E", + "olarr": "\u21BA", + "olcir": "\u29BE", + "olcross": "\u29BB", + "oline": "\u203E", + "olt": "\u29C0", + "omacr": "\u014D", + "omega": "\u03C9", + "omicron": "\u03BF", + "omid": "\u29B6", + "ominus": "\u2296", + "oopf": "\U0001D560", + "opar": "\u29B7", + "operp": "\u29B9", + "oplus": "\u2295", + "or": "\u2228", + "orarr": "\u21BB", + "ord": "\u2A5D", + "order": "\u2134", + "orderof": "\u2134", + "ordf": "\u00AA", + "ordm": "\u00BA", + "origof": "\u22B6", + "oror": "\u2A56", + "orslope": "\u2A57", + "orv": "\u2A5B", + "oscr": "\u2134", + "oslash": "\u00F8", + "osol": "\u2298", + "otilde": "\u00F5", + "otimes": "\u2297", + "otimesas": "\u2A36", + "ouml": "\u00F6", + "ovbar": "\u233D", + "par": "\u2225", + "para": "\u00B6", + "parallel": "\u2225", + "parsim": "\u2AF3", + "parsl": "\u2AFD", + "part": "\u2202", + "pcy": "\u043F", + "percnt": "\u0025", + "period": "\u002E", + "permil": "\u2030", + "perp": "\u22A5", + "pertenk": "\u2031", + "pfr": 
"\U0001D52D", + "phi": "\u03C6", + "phiv": "\u03D5", + "phmmat": "\u2133", + "phone": "\u260E", + "pi": "\u03C0", + "pitchfork": "\u22D4", + "piv": "\u03D6", + "planck": "\u210F", + "planckh": "\u210E", + "plankv": "\u210F", + "plus": "\u002B", + "plusacir": "\u2A23", + "plusb": "\u229E", + "pluscir": "\u2A22", + "plusdo": "\u2214", + "plusdu": "\u2A25", + "pluse": "\u2A72", + "plusmn": "\u00B1", + "plussim": "\u2A26", + "plustwo": "\u2A27", + "pm": "\u00B1", + "pointint": "\u2A15", + "popf": "\U0001D561", + "pound": "\u00A3", + "pr": "\u227A", + "prE": "\u2AB3", + "prap": "\u2AB7", + "prcue": "\u227C", + "pre": "\u2AAF", + "prec": "\u227A", + "precapprox": "\u2AB7", + "preccurlyeq": "\u227C", + "preceq": "\u2AAF", + "precnapprox": "\u2AB9", + "precneqq": "\u2AB5", + "precnsim": "\u22E8", + "precsim": "\u227E", + "prime": "\u2032", + "primes": "\u2119", + "prnE": "\u2AB5", + "prnap": "\u2AB9", + "prnsim": "\u22E8", + "prod": "\u220F", + "profalar": "\u232E", + "profline": "\u2312", + "profsurf": "\u2313", + "prop": "\u221D", + "propto": "\u221D", + "prsim": "\u227E", + "prurel": "\u22B0", + "pscr": "\U0001D4C5", + "psi": "\u03C8", + "puncsp": "\u2008", + "qfr": "\U0001D52E", + "qint": "\u2A0C", + "qopf": "\U0001D562", + "qprime": "\u2057", + "qscr": "\U0001D4C6", + "quaternions": "\u210D", + "quatint": "\u2A16", + "quest": "\u003F", + "questeq": "\u225F", + "quot": "\u0022", + "rAarr": "\u21DB", + "rArr": "\u21D2", + "rAtail": "\u291C", + "rBarr": "\u290F", + "rHar": "\u2964", + "race": "\u223D\u0331", + "racute": "\u0155", + "radic": "\u221A", + "raemptyv": "\u29B3", + "rang": "\u27E9", + "rangd": "\u2992", + "range": "\u29A5", + "rangle": "\u27E9", + "raquo": "\u00BB", + "rarr": "\u2192", + "rarrap": "\u2975", + "rarrb": "\u21E5", + "rarrbfs": "\u2920", + "rarrc": "\u2933", + "rarrfs": "\u291E", + "rarrhk": "\u21AA", + "rarrlp": "\u21AC", + "rarrpl": "\u2945", + "rarrsim": "\u2974", + "rarrtl": "\u21A3", + "rarrw": "\u219D", + "ratail": "\u291A", + "ratio": 
"\u2236", + "rationals": "\u211A", + "rbarr": "\u290D", + "rbbrk": "\u2773", + "rbrace": "\u007D", + "rbrack": "\u005D", + "rbrke": "\u298C", + "rbrksld": "\u298E", + "rbrkslu": "\u2990", + "rcaron": "\u0159", + "rcedil": "\u0157", + "rceil": "\u2309", + "rcub": "\u007D", + "rcy": "\u0440", + "rdca": "\u2937", + "rdldhar": "\u2969", + "rdquo": "\u201D", + "rdquor": "\u201D", + "rdsh": "\u21B3", + "real": "\u211C", + "realine": "\u211B", + "realpart": "\u211C", + "reals": "\u211D", + "rect": "\u25AD", + "reg": "\u00AE", + "rfisht": "\u297D", + "rfloor": "\u230B", + "rfr": "\U0001D52F", + "rhard": "\u21C1", + "rharu": "\u21C0", + "rharul": "\u296C", + "rho": "\u03C1", + "rhov": "\u03F1", + "rightarrow": "\u2192", + "rightarrowtail": "\u21A3", + "rightharpoondown": "\u21C1", + "rightharpoonup": "\u21C0", + "rightleftarrows": "\u21C4", + "rightleftharpoons": "\u21CC", + "rightrightarrows": "\u21C9", + "rightsquigarrow": "\u219D", + "rightthreetimes": "\u22CC", + "ring": "\u02DA", + "risingdotseq": "\u2253", + "rlarr": "\u21C4", + "rlhar": "\u21CC", + "rlm": "\u200F", + "rmoust": "\u23B1", + "rmoustache": "\u23B1", + "rnmid": "\u2AEE", + "roang": "\u27ED", + "roarr": "\u21FE", + "robrk": "\u27E7", + "ropar": "\u2986", + "ropf": "\U0001D563", + "roplus": "\u2A2E", + "rotimes": "\u2A35", + "rpar": "\u0029", + "rpargt": "\u2994", + "rppolint": "\u2A12", + "rrarr": "\u21C9", + "rsaquo": "\u203A", + "rscr": "\U0001D4C7", + "rsh": "\u21B1", + "rsqb": "\u005D", + "rsquo": "\u2019", + "rsquor": "\u2019", + "rthree": "\u22CC", + "rtimes": "\u22CA", + "rtri": "\u25B9", + "rtrie": "\u22B5", + "rtrif": "\u25B8", + "rtriltri": "\u29CE", + "ruluhar": "\u2968", + "rx": "\u211E", + "sacute": "\u015B", + "sbquo": "\u201A", + "sc": "\u227B", + "scE": "\u2AB4", + "scap": "\u2AB8", + "scaron": "\u0161", + "sccue": "\u227D", + "sce": "\u2AB0", + "scedil": "\u015F", + "scirc": "\u015D", + "scnE": "\u2AB6", + "scnap": "\u2ABA", + "scnsim": "\u22E9", + "scpolint": "\u2A13", + "scsim": 
"\u227F", + "scy": "\u0441", + "sdot": "\u22C5", + "sdotb": "\u22A1", + "sdote": "\u2A66", + "seArr": "\u21D8", + "searhk": "\u2925", + "searr": "\u2198", + "searrow": "\u2198", + "sect": "\u00A7", + "semi": "\u003B", + "seswar": "\u2929", + "setminus": "\u2216", + "setmn": "\u2216", + "sext": "\u2736", + "sfr": "\U0001D530", + "sfrown": "\u2322", + "sharp": "\u266F", + "shchcy": "\u0449", + "shcy": "\u0448", + "shortmid": "\u2223", + "shortparallel": "\u2225", + "shy": "\u00AD", + "sigma": "\u03C3", + "sigmaf": "\u03C2", + "sigmav": "\u03C2", + "sim": "\u223C", + "simdot": "\u2A6A", + "sime": "\u2243", + "simeq": "\u2243", + "simg": "\u2A9E", + "simgE": "\u2AA0", + "siml": "\u2A9D", + "simlE": "\u2A9F", + "simne": "\u2246", + "simplus": "\u2A24", + "simrarr": "\u2972", + "slarr": "\u2190", + "smallsetminus": "\u2216", + "smashp": "\u2A33", + "smeparsl": "\u29E4", + "smid": "\u2223", + "smile": "\u2323", + "smt": "\u2AAA", + "smte": "\u2AAC", + "smtes": "\u2AAC\uFE00", + "softcy": "\u044C", + "sol": "\u002F", + "solb": "\u29C4", + "solbar": "\u233F", + "sopf": "\U0001D564", + "spades": "\u2660", + "spadesuit": "\u2660", + "spar": "\u2225", + "sqcap": "\u2293", + "sqcaps": "\u2293\uFE00", + "sqcup": "\u2294", + "sqcups": "\u2294\uFE00", + "sqsub": "\u228F", + "sqsube": "\u2291", + "sqsubset": "\u228F", + "sqsubseteq": "\u2291", + "sqsup": "\u2290", + "sqsupe": "\u2292", + "sqsupset": "\u2290", + "sqsupseteq": "\u2292", + "squ": "\u25A1", + "square": "\u25A1", + "squarf": "\u25AA", + "squf": "\u25AA", + "srarr": "\u2192", + "sscr": "\U0001D4C8", + "ssetmn": "\u2216", + "ssmile": "\u2323", + "sstarf": "\u22C6", + "star": "\u2606", + "starf": "\u2605", + "straightepsilon": "\u03F5", + "straightphi": "\u03D5", + "strns": "\u00AF", + "sub": "\u2282", + "subE": "\u2AC5", + "subdot": "\u2ABD", + "sube": "\u2286", + "subedot": "\u2AC3", + "submult": "\u2AC1", + "subnE": "\u2ACB", + "subne": "\u228A", + "subplus": "\u2ABF", + "subrarr": "\u2979", + "subset": "\u2282", + 
"subseteq": "\u2286", + "subseteqq": "\u2AC5", + "subsetneq": "\u228A", + "subsetneqq": "\u2ACB", + "subsim": "\u2AC7", + "subsub": "\u2AD5", + "subsup": "\u2AD3", + "succ": "\u227B", + "succapprox": "\u2AB8", + "succcurlyeq": "\u227D", + "succeq": "\u2AB0", + "succnapprox": "\u2ABA", + "succneqq": "\u2AB6", + "succnsim": "\u22E9", + "succsim": "\u227F", + "sum": "\u2211", + "sung": "\u266A", + "sup": "\u2283", + "sup1": "\u00B9", + "sup2": "\u00B2", + "sup3": "\u00B3", + "supE": "\u2AC6", + "supdot": "\u2ABE", + "supdsub": "\u2AD8", + "supe": "\u2287", + "supedot": "\u2AC4", + "suphsol": "\u27C9", + "suphsub": "\u2AD7", + "suplarr": "\u297B", + "supmult": "\u2AC2", + "supnE": "\u2ACC", + "supne": "\u228B", + "supplus": "\u2AC0", + "supset": "\u2283", + "supseteq": "\u2287", + "supseteqq": "\u2AC6", + "supsetneq": "\u228B", + "supsetneqq": "\u2ACC", + "supsim": "\u2AC8", + "supsub": "\u2AD4", + "supsup": "\u2AD6", + "swArr": "\u21D9", + "swarhk": "\u2926", + "swarr": "\u2199", + "swarrow": "\u2199", + "swnwar": "\u292A", + "szlig": "\u00DF", + "target": "\u2316", + "tau": "\u03C4", + "tbrk": "\u23B4", + "tcaron": "\u0165", + "tcedil": "\u0163", + "tcy": "\u0442", + "tdot": "\u20DB", + "telrec": "\u2315", + "tfr": "\U0001D531", + "there4": "\u2234", + "therefore": "\u2234", + "theta": "\u03B8", + "thetasym": "\u03D1", + "thetav": "\u03D1", + "thickapprox": "\u2248", + "thicksim": "\u223C", + "thinsp": "\u2009", + "thkap": "\u2248", + "thksim": "\u223C", + "thorn": "\u00FE", + "tilde": "\u02DC", + "times": "\u00D7", + "timesb": "\u22A0", + "timesbar": "\u2A31", + "timesd": "\u2A30", + "tint": "\u222D", + "toea": "\u2928", + "top": "\u22A4", + "topbot": "\u2336", + "topcir": "\u2AF1", + "topf": "\U0001D565", + "topfork": "\u2ADA", + "tosa": "\u2929", + "tprime": "\u2034", + "trade": "\u2122", + "triangle": "\u25B5", + "triangledown": "\u25BF", + "triangleleft": "\u25C3", + "trianglelefteq": "\u22B4", + "triangleq": "\u225C", + "triangleright": "\u25B9", + 
"trianglerighteq": "\u22B5", + "tridot": "\u25EC", + "trie": "\u225C", + "triminus": "\u2A3A", + "triplus": "\u2A39", + "trisb": "\u29CD", + "tritime": "\u2A3B", + "trpezium": "\u23E2", + "tscr": "\U0001D4C9", + "tscy": "\u0446", + "tshcy": "\u045B", + "tstrok": "\u0167", + "twixt": "\u226C", + "twoheadleftarrow": "\u219E", + "twoheadrightarrow": "\u21A0", + "uArr": "\u21D1", + "uHar": "\u2963", + "uacute": "\u00FA", + "uarr": "\u2191", + "ubrcy": "\u045E", + "ubreve": "\u016D", + "ucirc": "\u00FB", + "ucy": "\u0443", + "udarr": "\u21C5", + "udblac": "\u0171", + "udhar": "\u296E", + "ufisht": "\u297E", + "ufr": "\U0001D532", + "ugrave": "\u00F9", + "uharl": "\u21BF", + "uharr": "\u21BE", + "uhblk": "\u2580", + "ulcorn": "\u231C", + "ulcorner": "\u231C", + "ulcrop": "\u230F", + "ultri": "\u25F8", + "umacr": "\u016B", + "uml": "\u00A8", + "uogon": "\u0173", + "uopf": "\U0001D566", + "uparrow": "\u2191", + "updownarrow": "\u2195", + "upharpoonleft": "\u21BF", + "upharpoonright": "\u21BE", + "uplus": "\u228E", + "upsi": "\u03C5", + "upsih": "\u03D2", + "upsilon": "\u03C5", + "upuparrows": "\u21C8", + "urcorn": "\u231D", + "urcorner": "\u231D", + "urcrop": "\u230E", + "uring": "\u016F", + "urtri": "\u25F9", + "uscr": "\U0001D4CA", + "utdot": "\u22F0", + "utilde": "\u0169", + "utri": "\u25B5", + "utrif": "\u25B4", + "uuarr": "\u21C8", + "uuml": "\u00FC", + "uwangle": "\u29A7", + "vArr": "\u21D5", + "vBar": "\u2AE8", + "vBarv": "\u2AE9", + "vDash": "\u22A8", + "vangrt": "\u299C", + "varepsilon": "\u03F5", + "varkappa": "\u03F0", + "varnothing": "\u2205", + "varphi": "\u03D5", + "varpi": "\u03D6", + "varpropto": "\u221D", + "varr": "\u2195", + "varrho": "\u03F1", + "varsigma": "\u03C2", + "varsubsetneq": "\u228A\uFE00", + "varsubsetneqq": "\u2ACB\uFE00", + "varsupsetneq": "\u228B\uFE00", + "varsupsetneqq": "\u2ACC\uFE00", + "vartheta": "\u03D1", + "vartriangleleft": "\u22B2", + "vartriangleright": "\u22B3", + "vcy": "\u0432", + "vdash": "\u22A2", + "vee": "\u2228", + 
"veebar": "\u22BB", + "veeeq": "\u225A", + "vellip": "\u22EE", + "verbar": "\u007C", + "vert": "\u007C", + "vfr": "\U0001D533", + "vltri": "\u22B2", + "vnsub": "\u2282\u20D2", + "vnsup": "\u2283\u20D2", + "vopf": "\U0001D567", + "vprop": "\u221D", + "vrtri": "\u22B3", + "vscr": "\U0001D4CB", + "vsubnE": "\u2ACB\uFE00", + "vsubne": "\u228A\uFE00", + "vsupnE": "\u2ACC\uFE00", + "vsupne": "\u228B\uFE00", + "vzigzag": "\u299A", + "wcirc": "\u0175", + "wedbar": "\u2A5F", + "wedge": "\u2227", + "wedgeq": "\u2259", + "weierp": "\u2118", + "wfr": "\U0001D534", + "wopf": "\U0001D568", + "wp": "\u2118", + "wr": "\u2240", + "wreath": "\u2240", + "wscr": "\U0001D4CC", + "xcap": "\u22C2", + "xcirc": "\u25EF", + "xcup": "\u22C3", + "xdtri": "\u25BD", + "xfr": "\U0001D535", + "xhArr": "\u27FA", + "xharr": "\u27F7", + "xi": "\u03BE", + "xlArr": "\u27F8", + "xlarr": "\u27F5", + "xmap": "\u27FC", + "xnis": "\u22FB", + "xodot": "\u2A00", + "xopf": "\U0001D569", + "xoplus": "\u2A01", + "xotime": "\u2A02", + "xrArr": "\u27F9", + "xrarr": "\u27F6", + "xscr": "\U0001D4CD", + "xsqcup": "\u2A06", + "xuplus": "\u2A04", + "xutri": "\u25B3", + "xvee": "\u22C1", + "xwedge": "\u22C0", + "yacute": "\u00FD", + "yacy": "\u044F", + "ycirc": "\u0177", + "ycy": "\u044B", + "yen": "\u00A5", + "yfr": "\U0001D536", + "yicy": "\u0457", + "yopf": "\U0001D56A", + "yscr": "\U0001D4CE", + "yucy": "\u044E", + "yuml": "\u00FF", + "zacute": "\u017A", + "zcaron": "\u017E", + "zcy": "\u0437", + "zdot": "\u017C", + "zeetrf": "\u2128", + "zeta": "\u03B6", + "zfr": "\U0001D537", + "zhcy": "\u0436", + "zigrarr": "\u21DD", + "zopf": "\U0001D56B", + "zscr": "\U0001D4CF", + "zwj": "\u200D", + "zwnj": "\u200C", +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/indented_code.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/indented_code.go new file mode 100644 index 00000000..9a4a6b82 --- /dev/null +++ 
b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/indented_code.go @@ -0,0 +1,99 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import ( + "strings" +) + +type IndentedCodeLine struct { + Indentation int + Range Range +} + +type IndentedCode struct { + blockBase + markdown string + + RawCode []IndentedCodeLine +} + +func (b *IndentedCode) Code() string { + var resultSb strings.Builder + for _, code := range b.RawCode { + resultSb.WriteString(strings.Repeat(" ", code.Indentation) + b.markdown[code.Range.Position:code.Range.End]) + } + return resultSb.String() +} + +func (b *IndentedCode) Continuation(indentation int, r Range) *continuation { + if indentation >= 4 { + return &continuation{ + Indentation: indentation - 4, + Remaining: r, + } + } + s := b.markdown[r.Position:r.End] + if strings.TrimSpace(s) == "" { + return &continuation{ + Remaining: r, + } + } + return nil +} + +func (b *IndentedCode) AddLine(indentation int, r Range) bool { + b.RawCode = append(b.RawCode, IndentedCodeLine{ + Indentation: indentation, + Range: r, + }) + return true +} + +func (b *IndentedCode) Close() { + for { + last := b.RawCode[len(b.RawCode)-1] + s := b.markdown[last.Range.Position:last.Range.End] + if strings.TrimRight(s, "\r\n") == "" { + b.RawCode = b.RawCode[:len(b.RawCode)-1] + } else { + break + } + } +} + +func (b *IndentedCode) AllowsBlockStarts() bool { + return false +} + +func indentedCodeStart(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block { + if len(unmatchedBlocks) > 0 { + if _, ok := unmatchedBlocks[len(unmatchedBlocks)-1].(*Paragraph); ok { + return nil + } + } else if len(matchedBlocks) > 0 { + if _, ok := matchedBlocks[len(matchedBlocks)-1].(*Paragraph); ok { + return nil + } + } + + if indentation < 4 { + return nil + } + + s := markdown[r.Position:r.End] + if strings.TrimSpace(s) == "" { + return nil + } 
+ + return []Block{ + &IndentedCode{ + markdown: markdown, + RawCode: []IndentedCodeLine{{ + Indentation: indentation - 4, + Range: r, + }}, + }, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/inlines.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/inlines.go new file mode 100644 index 00000000..fa10753b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/inlines.go @@ -0,0 +1,681 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import ( + "container/list" + "strings" + "unicode" + "unicode/utf8" +) + +type Inline interface { + IsInline() bool +} + +type inlineBase struct{} + +func (inlineBase) IsInline() bool { return true } + +type Text struct { + inlineBase + + Text string + Range Range +} + +type CodeSpan struct { + inlineBase + + Code string +} + +type HardLineBreak struct { + inlineBase +} + +type SoftLineBreak struct { + inlineBase +} + +type InlineLinkOrImage struct { + inlineBase + + Children []Inline + + RawDestination Range + + markdown string + rawTitle string +} + +func (i *InlineLinkOrImage) Destination() string { + return Unescape(i.markdown[i.RawDestination.Position:i.RawDestination.End]) +} + +func (i *InlineLinkOrImage) Title() string { + return Unescape(i.rawTitle) +} + +type InlineLink struct { + InlineLinkOrImage +} + +type InlineImage struct { + InlineLinkOrImage +} + +type ReferenceLinkOrImage struct { + inlineBase + *ReferenceDefinition + + Children []Inline +} + +type ReferenceLink struct { + ReferenceLinkOrImage +} + +type ReferenceImage struct { + ReferenceLinkOrImage +} + +type Autolink struct { + inlineBase + + Children []Inline + + RawDestination Range + + markdown string +} + +func (i *Autolink) Destination() string { + destination := Unescape(i.markdown[i.RawDestination.Position:i.RawDestination.End]) + + if strings.HasPrefix(destination, 
"www") { + destination = "http://" + destination + } + + return destination +} + +type Emoji struct { + inlineBase + + Name string +} + +type delimiterType int + +const ( + linkOpeningDelimiter delimiterType = iota + imageOpeningDelimiter +) + +type delimiter struct { + Type delimiterType + IsInactive bool + TextNode int + Range Range +} + +type inlineParser struct { + markdown string + ranges []Range + referenceDefinitions []*ReferenceDefinition + + raw string + position int + inlines []Inline + delimiterStack *list.List +} + +func newInlineParser(markdown string, ranges []Range, referenceDefinitions []*ReferenceDefinition) *inlineParser { + return &inlineParser{ + markdown: markdown, + ranges: ranges, + referenceDefinitions: referenceDefinitions, + delimiterStack: list.New(), + } +} + +func (p *inlineParser) parseBackticks() { + count := 1 + for i := p.position + 1; i < len(p.raw) && p.raw[i] == '`'; i++ { + count++ + } + opening := p.raw[p.position : p.position+count] + search := p.position + count + for search < len(p.raw) { + end := strings.Index(p.raw[search:], opening) + if end == -1 { + break + } + if search+end+count < len(p.raw) && p.raw[search+end+count] == '`' { + search += end + count + for search < len(p.raw) && p.raw[search] == '`' { + search++ + } + continue + } + code := strings.Join(strings.Fields(p.raw[p.position+count:search+end]), " ") + p.position = search + end + count + p.inlines = append(p.inlines, &CodeSpan{ + Code: code, + }) + return + } + p.position += len(opening) + absPos := relativeToAbsolutePosition(p.ranges, p.position-len(opening)) + p.inlines = append(p.inlines, &Text{ + Text: opening, + Range: Range{absPos, absPos + len(opening)}, + }) +} + +func (p *inlineParser) parseLineEnding() { + if p.position >= 1 && p.raw[p.position-1] == '\t' { + p.inlines = append(p.inlines, &HardLineBreak{}) + } else if p.position >= 2 && p.raw[p.position-1] == ' ' && (p.raw[p.position-2] == '\t' || p.raw[p.position-1] == ' ') { + p.inlines = 
append(p.inlines, &HardLineBreak{}) + } else { + p.inlines = append(p.inlines, &SoftLineBreak{}) + } + p.position++ + if p.position < len(p.raw) && p.raw[p.position] == '\n' { + p.position++ + } +} + +func (p *inlineParser) parseEscapeCharacter() { + if p.position+1 < len(p.raw) && isEscapableByte(p.raw[p.position+1]) { + absPos := relativeToAbsolutePosition(p.ranges, p.position+1) + p.inlines = append(p.inlines, &Text{ + Text: string(p.raw[p.position+1]), + Range: Range{absPos, absPos + len(string(p.raw[p.position+1]))}, + }) + p.position += 2 + } else { + absPos := relativeToAbsolutePosition(p.ranges, p.position) + p.inlines = append(p.inlines, &Text{ + Text: `\`, + Range: Range{absPos, absPos + 1}, + }) + p.position++ + } +} + +func (p *inlineParser) parseText() { + if next := strings.IndexAny(p.raw[p.position:], "\r\n\\`&![]wW:"); next == -1 { + absPos := relativeToAbsolutePosition(p.ranges, p.position) + p.inlines = append(p.inlines, &Text{ + Text: strings.TrimRightFunc(p.raw[p.position:], isWhitespace), + Range: Range{absPos, absPos + len(p.raw[p.position:])}, + }) + p.position = len(p.raw) + } else { + absPos := relativeToAbsolutePosition(p.ranges, p.position) + if p.raw[p.position+next] == '\r' || p.raw[p.position+next] == '\n' { + s := strings.TrimRightFunc(p.raw[p.position:p.position+next], isWhitespace) + p.inlines = append(p.inlines, &Text{ + Text: s, + Range: Range{absPos, absPos + len(s)}, + }) + } else { + if next == 0 { + // Always read at least one character since 'w', 'W', and ':' may not actually match another + // type of node + next = 1 + } + + p.inlines = append(p.inlines, &Text{ + Text: p.raw[p.position : p.position+next], + Range: Range{absPos, absPos + next}, + }) + } + p.position += next + } +} + +func (p *inlineParser) parseLinkOrImageDelimiter() { + absPos := relativeToAbsolutePosition(p.ranges, p.position) + if p.raw[p.position] == '[' { + p.inlines = append(p.inlines, &Text{ + Text: "[", + Range: Range{absPos, absPos + 1}, + }) + 
p.delimiterStack.PushBack(&delimiter{ + Type: linkOpeningDelimiter, + TextNode: len(p.inlines) - 1, + Range: Range{p.position, p.position + 1}, + }) + p.position++ + } else if p.raw[p.position] == '!' && p.position+1 < len(p.raw) && p.raw[p.position+1] == '[' { + p.inlines = append(p.inlines, &Text{ + Text: "![", + Range: Range{absPos, absPos + 2}, + }) + p.delimiterStack.PushBack(&delimiter{ + Type: imageOpeningDelimiter, + TextNode: len(p.inlines) - 1, + Range: Range{p.position, p.position + 2}, + }) + p.position += 2 + } else { + p.inlines = append(p.inlines, &Text{ + Text: "!", + Range: Range{absPos, absPos + 1}, + }) + p.position++ + } +} + +func (p *inlineParser) peekAtInlineLinkDestinationAndTitle(position int, isImage bool) (destination, title Range, end int, ok bool) { + if position >= len(p.raw) || p.raw[position] != '(' { + return + } + position++ + + destinationStart := nextNonWhitespace(p.raw, position) + if destinationStart >= len(p.raw) { + return + } else if p.raw[destinationStart] == ')' { + return Range{destinationStart, destinationStart}, Range{destinationStart, destinationStart}, destinationStart + 1, true + } + + destination, end, ok = parseLinkDestination(p.raw, destinationStart) + if !ok { + return + } + position = end + + if isImage && position < len(p.raw) && isWhitespaceByte(p.raw[position]) { + dimensionsStart := nextNonWhitespace(p.raw, position) + if dimensionsStart >= len(p.raw) { + return + } + + if p.raw[dimensionsStart] == '=' { + // Read optional image dimensions even if we don't use them + _, end, ok = parseImageDimensions(p.raw, dimensionsStart) + if !ok { + return + } + + position = end + } + } + + if position < len(p.raw) && isWhitespaceByte(p.raw[position]) { + titleStart := nextNonWhitespace(p.raw, position) + if titleStart >= len(p.raw) { + return + } else if p.raw[titleStart] == ')' { + return destination, Range{titleStart, titleStart}, titleStart + 1, true + } + + if p.raw[titleStart] == '"' || p.raw[titleStart] == '\'' || 
p.raw[titleStart] == '(' { + title, end, ok = parseLinkTitle(p.raw, titleStart) + if !ok { + return + } + position = end + } + } + + closingPosition := nextNonWhitespace(p.raw, position) + if closingPosition >= len(p.raw) || p.raw[closingPosition] != ')' { + return Range{}, Range{}, 0, false + } + + return destination, title, closingPosition + 1, true +} + +func (p *inlineParser) referenceDefinition(label string) *ReferenceDefinition { + clean := strings.Join(strings.Fields(label), " ") + for _, d := range p.referenceDefinitions { + if strings.EqualFold(clean, strings.Join(strings.Fields(d.Label()), " ")) { + return d + } + } + return nil +} + +func (p *inlineParser) lookForLinkOrImage() { + for element := p.delimiterStack.Back(); element != nil; element = element.Prev() { + d := element.Value.(*delimiter) + if d.Type != imageOpeningDelimiter && d.Type != linkOpeningDelimiter { + continue + } + if d.IsInactive { + p.delimiterStack.Remove(element) + break + } + + isImage := d.Type == imageOpeningDelimiter + + var inline Inline + + if destination, title, next, ok := p.peekAtInlineLinkDestinationAndTitle(p.position+1, isImage); ok { + destinationMarkdownPosition := relativeToAbsolutePosition(p.ranges, destination.Position) + linkOrImage := InlineLinkOrImage{ + Children: append([]Inline(nil), p.inlines[d.TextNode+1:]...), + RawDestination: Range{destinationMarkdownPosition, destinationMarkdownPosition + destination.End - destination.Position}, + markdown: p.markdown, + rawTitle: p.raw[title.Position:title.End], + } + if d.Type == imageOpeningDelimiter { + inline = &InlineImage{linkOrImage} + } else { + inline = &InlineLink{linkOrImage} + } + p.position = next + } else { + referenceLabel := "" + label, next, hasLinkLabel := parseLinkLabel(p.raw, p.position+1) + if hasLinkLabel && label.End > label.Position { + referenceLabel = p.raw[label.Position:label.End] + } else { + referenceLabel = p.raw[d.Range.End:p.position] + if !hasLinkLabel { + next = p.position + 1 + } + } 
+ if referenceLabel != "" { + if reference := p.referenceDefinition(referenceLabel); reference != nil { + linkOrImage := ReferenceLinkOrImage{ + ReferenceDefinition: reference, + Children: append([]Inline(nil), p.inlines[d.TextNode+1:]...), + } + if d.Type == imageOpeningDelimiter { + inline = &ReferenceImage{linkOrImage} + } else { + inline = &ReferenceLink{linkOrImage} + } + p.position = next + } + } + } + + if inline != nil { + if d.Type == imageOpeningDelimiter { + p.inlines = append(p.inlines[:d.TextNode], inline) + } else { + p.inlines = append(p.inlines[:d.TextNode], inline) + for inlineElement := element.Prev(); inlineElement != nil; inlineElement = inlineElement.Prev() { + if d := inlineElement.Value.(*delimiter); d.Type == linkOpeningDelimiter { + d.IsInactive = true + } + } + } + p.delimiterStack.Remove(element) + return + } + p.delimiterStack.Remove(element) + break + } + absPos := relativeToAbsolutePosition(p.ranges, p.position) + p.inlines = append(p.inlines, &Text{ + Text: "]", + Range: Range{absPos, absPos + 1}, + }) + p.position++ +} + +func CharacterReference(ref string) string { + if ref == "" { + return "" + } + if ref[0] == '#' { + if len(ref) < 2 { + return "" + } + n := 0 + if ref[1] == 'X' || ref[1] == 'x' { + if len(ref) < 3 { + return "" + } + for i := 2; i < len(ref); i++ { + if i > 9 { + return "" + } + d := ref[i] + switch { + case d >= '0' && d <= '9': + n = n*16 + int(d-'0') + case d >= 'a' && d <= 'f': + n = n*16 + 10 + int(d-'a') + case d >= 'A' && d <= 'F': + n = n*16 + 10 + int(d-'A') + default: + return "" + } + } + } else { + for i := 1; i < len(ref); i++ { + if i > 8 || ref[i] < '0' || ref[i] > '9' { + return "" + } + n = n*10 + int(ref[i]-'0') + } + } + c := rune(n) + if c == '\u0000' || !utf8.ValidRune(c) { + return string(unicode.ReplacementChar) + } + return string(c) + } + if entity, ok := htmlEntities[ref]; ok { + return entity + } + return "" +} + +func (p *inlineParser) parseCharacterReference() { + absPos := 
relativeToAbsolutePosition(p.ranges, p.position) + p.position++ + if semicolon := strings.IndexByte(p.raw[p.position:], ';'); semicolon == -1 { + p.inlines = append(p.inlines, &Text{ + Text: "&", + Range: Range{absPos, absPos + 1}, + }) + } else if s := CharacterReference(p.raw[p.position : p.position+semicolon]); s != "" { + p.position += semicolon + 1 + p.inlines = append(p.inlines, &Text{ + Text: s, + Range: Range{absPos, absPos + len(s)}, + }) + } else { + p.inlines = append(p.inlines, &Text{ + Text: "&", + Range: Range{absPos, absPos + 1}, + }) + } +} + +func (p *inlineParser) parseAutolink(c rune) bool { + for element := p.delimiterStack.Back(); element != nil; element = element.Prev() { + d := element.Value.(*delimiter) + if !d.IsInactive { + return false + } + } + + var link Range + if c == ':' { + var ok bool + link, ok = parseURLAutolink(p.raw, p.position) + + if !ok { + return false + } + + // Since the current position is at the colon, we have to rewind the parsing slightly so that + // we don't duplicate the URL scheme + rewind := strings.Index(p.raw[link.Position:link.End], ":") + if rewind != -1 { + lastInline := p.inlines[len(p.inlines)-1] + lastText, ok := lastInline.(*Text) + + if !ok { + // This should never occur since parseURLAutolink will only return a non-empty value + // when the previous text ends in a valid URL protocol which would mean that the previous + // node is a Text node + return false + } + + p.inlines = p.inlines[0 : len(p.inlines)-1] + p.inlines = append(p.inlines, &Text{ + Text: lastText.Text[:len(lastText.Text)-rewind], + Range: Range{lastText.Range.Position, lastText.Range.End - rewind}, + }) + p.position -= rewind + } + } else if c == 'w' || c == 'W' { + var ok bool + link, ok = parseWWWAutolink(p.raw, p.position) + + if !ok { + return false + } + } + + linkMarkdownPosition := relativeToAbsolutePosition(p.ranges, link.Position) + linkRange := Range{linkMarkdownPosition, linkMarkdownPosition + link.End - link.Position} + + 
p.inlines = append(p.inlines, &Autolink{ + Children: []Inline{ + &Text{ + Text: p.raw[link.Position:link.End], + Range: linkRange, + }, + }, + RawDestination: linkRange, + markdown: p.markdown, + }) + p.position += (link.End - link.Position) + + return true +} + +func (p *inlineParser) Parse() []Inline { + for _, r := range p.ranges { + p.raw += p.markdown[r.Position:r.End] + } + + for p.position < len(p.raw) { + c, _ := utf8.DecodeRuneInString(p.raw[p.position:]) + + switch c { + case '\r', '\n': + p.parseLineEnding() + case '\\': + p.parseEscapeCharacter() + case '`': + p.parseBackticks() + case '&': + p.parseCharacterReference() + case '!', '[': + p.parseLinkOrImageDelimiter() + case ']': + p.lookForLinkOrImage() + case 'w', 'W': + matched := p.parseAutolink(c) + + if !matched { + p.parseText() + } + case ':': + matched := p.parseAutolink(c) + if matched { + continue + } + + matched = p.parseEmoji() + if matched { + continue + } + + p.parseText() + default: + p.parseText() + } + } + + return p.inlines +} + +func ParseInlines(markdown string, ranges []Range, referenceDefinitions []*ReferenceDefinition) (inlines []Inline) { + return newInlineParser(markdown, ranges, referenceDefinitions).Parse() +} + +func MergeInlineText(inlines []Inline) []Inline { + ret := inlines[:0] + for i, v := range inlines { + // always add first node + if i == 0 { + ret = append(ret, v) + continue + } + // not a text node? nothing to merge + text, ok := v.(*Text) + if !ok { + ret = append(ret, v) + continue + } + // previous node is not a text node? 
nothing to merge + prevText, ok := ret[len(ret)-1].(*Text) + if !ok { + ret = append(ret, v) + continue + } + // previous node is not right before this one + if prevText.Range.End != text.Range.Position { + ret = append(ret, v) + continue + } + // we have two consecutive text nodes + ret[len(ret)-1] = &Text{ + Text: prevText.Text + text.Text, + Range: Range{prevText.Range.Position, text.Range.End}, + } + } + return ret +} + +func Unescape(markdown string) string { + var ret strings.Builder + + position := 0 + for position < len(markdown) { + c, cSize := utf8.DecodeRuneInString(markdown[position:]) + + switch c { + case '\\': + if position+1 < len(markdown) && isEscapableByte(markdown[position+1]) { + ret.WriteByte(markdown[position+1]) + position += 2 + } else { + ret.WriteString(`\`) + position++ + } + case '&': + position++ + if semicolon := strings.IndexByte(markdown[position:], ';'); semicolon == -1 { + ret.WriteString("&") + } else if s := CharacterReference(markdown[position : position+semicolon]); s != "" { + position += semicolon + 1 + ret.WriteString(s) + } else { + ret.WriteString("&") + } + default: + ret.WriteRune(c) + position += cSize + } + } + + return ret.String() +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/inspect.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/inspect.go new file mode 100644 index 00000000..151b9590 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/inspect.go @@ -0,0 +1,123 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +const ( + // Assuming 64k maxSize of a post which can be stored in DB. + // Allow scanning upto twice(arbitrary value) the post size. + maxLen = 1024 * 64 * 2 +) + +// Inspect traverses the markdown tree in depth-first order. 
If f returns true, Inspect invokes f +// recursively for each child of the block or inline, followed by a call of f(nil). +func Inspect(markdown string, f func(any) bool) { + if len(markdown) > maxLen { + return + } + document, referenceDefinitions := Parse(markdown) + InspectBlock(document, func(block Block) bool { + if !f(block) { + return false + } + switch v := block.(type) { + case *Paragraph: + for _, inline := range MergeInlineText(v.ParseInlines(referenceDefinitions)) { + InspectInline(inline, func(inline Inline) bool { + return f(inline) + }) + } + } + return true + }) +} + +// InspectBlock traverses the blocks in depth-first order, starting with block. If f returns true, +// InspectBlock invokes f recursively for each child of the block, followed by a call of f(nil). +func InspectBlock(block Block, f func(Block) bool) { + stack := []Block{block} + // Using seen for backtracking + seen := map[Block]bool{} + + for len(stack) > 0 { + // "peek" the node from the stack + block := stack[len(stack)-1] + + if seen[block] { + // "pop" the node only when backtracking(seen) + stack = stack[:len(stack)-1] + f(nil) + continue + } + seen[block] = true + + // Process the node + if !f(block) { + continue + } + + switch v := block.(type) { + case *Document: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + case *List: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + case *ListItem: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + case *BlockQuote: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + } + } +} + +// InspectInline traverses the blocks in depth-first order, starting with block. If f returns true, +// InspectInline invokes f recursively for each child of the block, followed by a call of f(nil). 
+func InspectInline(inline Inline, f func(Inline) bool) { + stack := []Inline{inline} + // Using seen for backtracking + seen := map[Inline]bool{} + + for len(stack) > 0 { + // "peek" the node from the stack + inline := stack[len(stack)-1] + + if seen[inline] { + // "pop" the node only when backtracking(seen) + stack = stack[:len(stack)-1] + f(nil) + continue + } + seen[inline] = true + + // Process the node + if !f(inline) { + continue + } + + switch v := inline.(type) { + case *InlineImage: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + case *InlineLink: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + case *ReferenceImage: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + case *ReferenceLink: + for i := len(v.Children) - 1; i >= 0; i-- { + stack = append(stack, v.Children[i]) + } + } + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/lines.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/lines.go new file mode 100644 index 00000000..f59e5afe --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/lines.go @@ -0,0 +1,32 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package markdown + +import ( + "strings" +) + +type Line struct { + Range +} + +func ParseLines(markdown string) []Line { + lineStartPosition := 0 + isAfterCarriageReturn := false + lines := make([]Line, 0, strings.Count(markdown, "\n")) + for position, r := range markdown { + if r == '\n' { + lines = append(lines, Line{Range{lineStartPosition, position + 1}}) + lineStartPosition = position + 1 + } else if isAfterCarriageReturn { + lines = append(lines, Line{Range{lineStartPosition, position}}) + lineStartPosition = position + } + isAfterCarriageReturn = r == '\r' + } + if lineStartPosition < len(markdown) { + lines = append(lines, Line{Range{lineStartPosition, len(markdown)}}) + } + return lines +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/links.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/links.go new file mode 100644 index 00000000..6aa56f25 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/links.go @@ -0,0 +1,184 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package markdown + +import ( + "unicode/utf8" +) + +func parseLinkDestination(markdown string, position int) (raw Range, next int, ok bool) { + if position >= len(markdown) { + return + } + + if markdown[position] == '<' { + isEscaped := false + + for offset, c := range []byte(markdown[position+1:]) { + if isEscaped { + isEscaped = false + if isEscapableByte(c) { + continue + } + } + + if c == '\\' { + isEscaped = true + } else if c == '<' { + break + } else if c == '>' { + return Range{position + 1, position + 1 + offset}, position + 1 + offset + 1, true + } else if isWhitespaceByte(c) { + break + } + } + } + + openCount := 0 + isEscaped := false + for offset, c := range []byte(markdown[position:]) { + if isEscaped { + isEscaped = false + if isEscapableByte(c) { + continue + } + } + + switch c { + case '\\': + isEscaped = true + case '(': + openCount++ + case ')': + if openCount < 1 { + return Range{position, position + offset}, position + offset, true + } + openCount-- + default: + if isWhitespaceByte(c) { + return Range{position, position + offset}, position + offset, true + } + } + } + return Range{position, len(markdown)}, len(markdown), true +} + +func parseLinkTitle(markdown string, position int) (raw Range, next int, ok bool) { + if position >= len(markdown) { + return + } + + originalPosition := position + + var closer byte + switch markdown[position] { + case '"', '\'': + closer = markdown[position] + case '(': + closer = ')' + default: + return + } + position++ + + for position < len(markdown) { + switch markdown[position] { + case '\\': + position++ + if position < len(markdown) && isEscapableByte(markdown[position]) { + position++ + } + case closer: + return Range{originalPosition + 1, position}, position + 1, true + default: + position++ + } + } + + return +} + +func parseLinkLabel(markdown string, position int) (raw Range, next int, ok bool) { + if position >= len(markdown) || markdown[position] != '[' { + return + } + + originalPosition := 
position + position++ + + for position < len(markdown) { + switch markdown[position] { + case '\\': + position++ + if position < len(markdown) && isEscapableByte(markdown[position]) { + position++ + } + case '[': + return + case ']': + if position-originalPosition >= 1000 && utf8.RuneCountInString(markdown[originalPosition:position]) >= 1000 { + return + } + return Range{originalPosition + 1, position}, position + 1, true + default: + position++ + } + } + + return +} + +// As a non-standard feature, we allow image links to specify dimensions of the image by adding "=WIDTHxHEIGHT" +// after the image destination but before the image title like ![alt](http://example.com/image.png =100x200 "title"). +// Both width and height are optional, but at least one of them must be specified. +func parseImageDimensions(markdown string, position int) (raw Range, next int, ok bool) { + if position >= len(markdown) { + return + } + + originalPosition := position + + // Read = + position += 1 + if position >= len(markdown) { + return + } + + // Read width + hasWidth := false + for position < len(markdown)-1 && isNumericByte(markdown[position]) { + hasWidth = true + position += 1 + } + + // Look for early end of dimensions + if isWhitespaceByte(markdown[position]) || markdown[position] == ')' { + return Range{originalPosition, position - 1}, position, true + } + + // Read the x + if (markdown[position] != 'x' && markdown[position] != 'X') || position == len(markdown)-1 { + return + } + position += 1 + + // Read height + hasHeight := false + for position < len(markdown)-1 && isNumericByte(markdown[position]) { + hasHeight = true + position += 1 + } + + // Make sure the there's no trailing characters + if !isWhitespaceByte(markdown[position]) && markdown[position] != ')' { + return + } + + if !hasWidth && !hasHeight { + // At least one of width or height is required + return + } + + return Range{originalPosition, position - 1}, position, true +} diff --git 
a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/list.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/list.go new file mode 100644 index 00000000..39039295 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/list.go @@ -0,0 +1,220 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import ( + "strings" +) + +type ListItem struct { + blockBase + markdown string + hasTrailingBlankLine bool + hasBlankLineBetweenChildren bool + + Indentation int + Children []Block +} + +func (b *ListItem) Continuation(indentation int, r Range) *continuation { + s := b.markdown[r.Position:r.End] + if strings.TrimSpace(s) == "" { + if b.Children == nil { + return nil + } + return &continuation{ + Remaining: r, + } + } + if indentation < b.Indentation { + return nil + } + return &continuation{ + Indentation: indentation - b.Indentation, + Remaining: r, + } +} + +func (b *ListItem) AddChild(openBlocks []Block) []Block { + b.Children = append(b.Children, openBlocks[0]) + if b.hasTrailingBlankLine { + b.hasBlankLineBetweenChildren = true + } + b.hasTrailingBlankLine = false + return openBlocks +} + +func (b *ListItem) AddLine(indentation int, r Range) bool { + isBlank := strings.TrimSpace(b.markdown[r.Position:r.End]) == "" + if isBlank { + b.hasTrailingBlankLine = true + } + return false +} + +func (b *ListItem) HasTrailingBlankLine() bool { + return b.hasTrailingBlankLine || (len(b.Children) > 0 && b.Children[len(b.Children)-1].HasTrailingBlankLine()) +} + +func (b *ListItem) isLoose() bool { + if b.hasBlankLineBetweenChildren { + return true + } + for i, child := range b.Children { + if i < len(b.Children)-1 && child.HasTrailingBlankLine() { + return true + } + } + return false +} + +type List struct { + blockBase + markdown string + hasTrailingBlankLine bool + hasBlankLineBetweenChildren bool + + IsLoose bool + 
IsOrdered bool + OrderedStart int + BulletOrDelimiter byte + Children []*ListItem +} + +func (b *List) Continuation(indentation int, r Range) *continuation { + s := b.markdown[r.Position:r.End] + if strings.TrimSpace(s) == "" { + return &continuation{ + Remaining: r, + } + } + return &continuation{ + Indentation: indentation, + Remaining: r, + } +} + +func (b *List) AddChild(openBlocks []Block) []Block { + if item, ok := openBlocks[0].(*ListItem); ok { + b.Children = append(b.Children, item) + if b.hasTrailingBlankLine { + b.hasBlankLineBetweenChildren = true + } + b.hasTrailingBlankLine = false + return openBlocks + } else if list, ok := openBlocks[0].(*List); ok { + if len(list.Children) == 1 && list.IsOrdered == b.IsOrdered && list.BulletOrDelimiter == b.BulletOrDelimiter { + return b.AddChild(openBlocks[1:]) + } + } + return nil +} + +func (b *List) AddLine(indentation int, r Range) bool { + isBlank := strings.TrimSpace(b.markdown[r.Position:r.End]) == "" + if isBlank { + b.hasTrailingBlankLine = true + } + return false +} + +func (b *List) HasTrailingBlankLine() bool { + return b.hasTrailingBlankLine || (len(b.Children) > 0 && b.Children[len(b.Children)-1].HasTrailingBlankLine()) +} + +func (b *List) isLoose() bool { + if b.hasBlankLineBetweenChildren { + return true + } + for i, child := range b.Children { + if child.isLoose() || (i < len(b.Children)-1 && child.HasTrailingBlankLine()) { + return true + } + } + return false +} + +func (b *List) Close() { + b.IsLoose = b.isLoose() +} + +func parseListMarker(markdown string, r Range) (success, isOrdered bool, orderedStart int, bulletOrDelimiter byte, markerWidth int, remaining Range) { + digits := 0 + n := 0 + for i := r.Position; i < r.End && markdown[i] >= '0' && markdown[i] <= '9'; i++ { + digits++ + n = n*10 + int(markdown[i]-'0') + } + if digits > 0 { + if digits > 9 || r.Position+digits >= r.End { + return + } + next := markdown[r.Position+digits] + if next != '.' 
&& next != ')' { + return + } + return true, true, n, next, digits + 1, Range{r.Position + digits + 1, r.End} + } + if r.Position >= r.End { + return + } + next := markdown[r.Position] + if next != '-' && next != '+' && next != '*' { + return + } + return true, false, 0, next, 1, Range{r.Position + 1, r.End} +} + +func listStart(markdown string, indent int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block { + afterList := false + if len(matchedBlocks) > 0 { + _, afterList = matchedBlocks[len(matchedBlocks)-1].(*List) + } + if !afterList && indent > 3 { + return nil + } + + success, isOrdered, orderedStart, bulletOrDelimiter, markerWidth, remaining := parseListMarker(markdown, r) + if !success { + return nil + } + + isBlank := strings.TrimSpace(markdown[remaining.Position:remaining.End]) == "" + if len(matchedBlocks) > 0 && len(unmatchedBlocks) == 0 { + if _, ok := matchedBlocks[len(matchedBlocks)-1].(*Paragraph); ok { + if isBlank || (isOrdered && orderedStart != 1) { + return nil + } + } + } + + indentAfterMarker, indentBytesAfterMarker := countIndentation(markdown, remaining) + if !isBlank && indentAfterMarker < 1 { + return nil + } + + remaining = Range{remaining.Position + indentBytesAfterMarker, remaining.End} + consumedIndentAfterMarker := indentAfterMarker + if isBlank || indentAfterMarker >= 5 { + consumedIndentAfterMarker = 1 + } + + listItem := &ListItem{ + markdown: markdown, + Indentation: indent + markerWidth + consumedIndentAfterMarker, + } + list := &List{ + markdown: markdown, + IsOrdered: isOrdered, + OrderedStart: orderedStart, + BulletOrDelimiter: bulletOrDelimiter, + Children: []*ListItem{listItem}, + } + ret := []Block{list, listItem} + if descendants := blockStartOrParagraph(markdown, indentAfterMarker-consumedIndentAfterMarker, remaining, nil, nil); descendants != nil { + listItem.Children = append(listItem.Children, descendants[0]) + ret = append(ret, descendants...) 
+ } + return ret +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/markdown.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/markdown.go new file mode 100644 index 00000000..5d45400a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/markdown.go @@ -0,0 +1,156 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// This package implements a parser for the subset of the CommonMark spec necessary for us to do +// server-side processing. It is not a full implementation and lacks many features. But it is +// complete enough to efficiently and accurately allow us to do what we need to like rewrite image +// URLs for proxying. +package markdown + +import ( + "strings" +) + +func isEscapable(c rune) bool { + return c > ' ' && (c < '0' || (c > '9' && (c < 'A' || (c > 'Z' && (c < 'a' || (c > 'z' && c <= '~')))))) +} + +func isEscapableByte(c byte) bool { + return isEscapable(rune(c)) +} + +func isWhitespace(c rune) bool { + switch c { + case ' ', '\t', '\n', '\u000b', '\u000c', '\r': + return true + } + return false +} + +func isWhitespaceByte(c byte) bool { + return isWhitespace(rune(c)) +} + +func isNumeric(c rune) bool { + return c >= '0' && c <= '9' +} + +func isNumericByte(c byte) bool { + return isNumeric(rune(c)) +} + +func isHex(c rune) bool { + return isNumeric(c) || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F') +} + +func isHexByte(c byte) bool { + return isHex(rune(c)) +} + +func isAlphanumeric(c rune) bool { + return isNumeric(c) || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') +} + +func isAlphanumericByte(c byte) bool { + return isAlphanumeric(rune(c)) +} + +// isWord returns true if c matches the \w regexp character class +func isWord(c rune) bool { + return isAlphanumeric(c) || c == '_' +} + +func isWordByte(c byte) bool { + return isWord(rune(c)) +} + +func nextNonWhitespace(markdown 
string, position int) int { + for offset, c := range []byte(markdown[position:]) { + if !isWhitespaceByte(c) { + return position + offset + } + } + return len(markdown) +} + +func nextLine(markdown string, position int) (linePosition int, skippedNonWhitespace bool) { + for i := position; i < len(markdown); i++ { + c := markdown[i] + if c == '\r' { + if i+1 < len(markdown) && markdown[i+1] == '\n' { + return i + 2, skippedNonWhitespace + } + return i + 1, skippedNonWhitespace + } else if c == '\n' { + return i + 1, skippedNonWhitespace + } else if !isWhitespaceByte(c) { + skippedNonWhitespace = true + } + } + return len(markdown), skippedNonWhitespace +} + +func countIndentation(markdown string, r Range) (spaces, bytes int) { + for i := r.Position; i < r.End; i++ { + if markdown[i] == ' ' { + spaces++ + bytes++ + } else if markdown[i] == '\t' { + spaces += 4 + bytes++ + } else { + break + } + } + return +} + +func trimLeftSpace(markdown string, r Range) Range { + s := markdown[r.Position:r.End] + trimmed := strings.TrimLeftFunc(s, isWhitespace) + return Range{r.Position, r.End - (len(s) - len(trimmed))} +} + +func trimRightSpace(markdown string, r Range) Range { + s := markdown[r.Position:r.End] + trimmed := strings.TrimRightFunc(s, isWhitespace) + return Range{r.Position, r.End - (len(s) - len(trimmed))} +} + +func relativeToAbsolutePosition(ranges []Range, position int) int { + rem := position + for _, r := range ranges { + l := r.End - r.Position + if rem < l { + return r.Position + rem + } + rem -= l + } + if len(ranges) == 0 { + return 0 + } + return ranges[len(ranges)-1].End +} + +func trimBytesFromRanges(ranges []Range, bytes int) (result []Range) { + rem := bytes + for _, r := range ranges { + if rem == 0 { + result = append(result, r) + continue + } + l := r.End - r.Position + if rem < l { + result = append(result, Range{r.Position + rem, r.End}) + rem = 0 + continue + } + rem -= l + } + return +} + +func Parse(markdown string) (*Document, 
[]*ReferenceDefinition) { + lines := ParseLines(markdown) + return ParseBlocks(markdown, lines) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/paragraph.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/paragraph.go new file mode 100644 index 00000000..aef01b5e --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/paragraph.go @@ -0,0 +1,71 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import ( + "strings" +) + +type Paragraph struct { + blockBase + markdown string + + Text []Range + ReferenceDefinitions []*ReferenceDefinition +} + +func (b *Paragraph) ParseInlines(referenceDefinitions []*ReferenceDefinition) []Inline { + return ParseInlines(b.markdown, b.Text, referenceDefinitions) +} + +func (b *Paragraph) Continuation(indentation int, r Range) *continuation { + s := b.markdown[r.Position:r.End] + if strings.TrimSpace(s) == "" { + return nil + } + return &continuation{ + Indentation: indentation, + Remaining: r, + } +} + +func (b *Paragraph) Close() { + for { + for i := 0; i < len(b.Text); i++ { + b.Text[i] = trimLeftSpace(b.markdown, b.Text[i]) + if b.Text[i].Position < b.Text[i].End { + break + } + } + + if len(b.Text) == 0 || b.Text[0].Position < b.Text[0].End && b.markdown[b.Text[0].Position] != '[' { + break + } + + definition, remaining := parseReferenceDefinition(b.markdown, b.Text) + if definition == nil { + break + } + b.ReferenceDefinitions = append(b.ReferenceDefinitions, definition) + b.Text = remaining + } + + for i := len(b.Text) - 1; i >= 0; i-- { + b.Text[i] = trimRightSpace(b.markdown, b.Text[i]) + if b.Text[i].Position < b.Text[i].End { + break + } + } +} + +func newParagraph(markdown string, r Range) *Paragraph { + s := markdown[r.Position:r.End] + if strings.TrimSpace(s) == "" { + return nil + } + return &Paragraph{ + markdown: markdown, + Text: 
[]Range{r}, + } +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/reference_definition.go b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/reference_definition.go new file mode 100644 index 00000000..1c40cf83 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/markdown/reference_definition.go @@ -0,0 +1,78 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package markdown + +import "strings" + +type ReferenceDefinition struct { + RawDestination Range + + markdown string + rawLabel string + rawTitle string +} + +func (d *ReferenceDefinition) Destination() string { + return Unescape(d.markdown[d.RawDestination.Position:d.RawDestination.End]) +} + +func (d *ReferenceDefinition) Label() string { + return d.rawLabel +} + +func (d *ReferenceDefinition) Title() string { + return Unescape(d.rawTitle) +} + +func parseReferenceDefinition(markdown string, ranges []Range) (*ReferenceDefinition, []Range) { + var rawSb strings.Builder + for _, r := range ranges { + rawSb.WriteString(markdown[r.Position:r.End]) + } + raw := rawSb.String() + + label, next, ok := parseLinkLabel(raw, 0) + if !ok { + return nil, nil + } + position := next + + if position >= len(raw) || raw[position] != ':' { + return nil, nil + } + position++ + + destination, next, ok := parseLinkDestination(raw, nextNonWhitespace(raw, position)) + if !ok { + return nil, nil + } + position = next + + absoluteDestination := relativeToAbsolutePosition(ranges, destination.Position) + ret := &ReferenceDefinition{ + RawDestination: Range{absoluteDestination, absoluteDestination + destination.End - destination.Position}, + markdown: markdown, + rawLabel: raw[label.Position:label.End], + } + + if position < len(raw) && isWhitespaceByte(raw[position]) { + title, next, ok := parseLinkTitle(raw, nextNonWhitespace(raw, position)) + if !ok { + if nextLine, 
skippedNonWhitespace := nextLine(raw, position); !skippedNonWhitespace { + return ret, trimBytesFromRanges(ranges, nextLine) + } + return nil, nil + } + if nextLine, skippedNonWhitespace := nextLine(raw, next); !skippedNonWhitespace { + ret.rawTitle = raw[title.Position:title.End] + return ret, trimBytesFromRanges(ranges, nextLine) + } + } + + if nextLine, skippedNonWhitespace := nextLine(raw, position); !skippedNonWhitespace { + return ret, trimBytesFromRanges(ranges, nextLine) + } + + return nil, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/default.go b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/default.go new file mode 100644 index 00000000..0567c016 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/default.go @@ -0,0 +1,63 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package mlog + +import ( + "bytes" + "encoding/json" + "fmt" + "os" +) + +// defaultLog manually encodes the log to STDERR, providing a basic, default logging implementation +// before mlog is fully configured. +func defaultLog(level Level, msg string, fields ...Field) { + mFields := make(map[string]string) + buf := &bytes.Buffer{} + + for _, fld := range fields { + buf.Reset() + fld.ValueString(buf, shouldQuote) + mFields[fld.Key] = buf.String() + } + + log := struct { + Level string `json:"level"` + Message string `json:"msg"` + Fields map[string]string `json:"fields,omitempty"` + }{ + level.Name, + msg, + mFields, + } + + if b, err := json.Marshal(log); err != nil { + fmt.Fprintf(os.Stderr, `{"level":"error","msg":"failed to encode log message"}%s`, "\n") + } else { + fmt.Fprintf(os.Stderr, "%s\n", b) + } +} + +func defaultIsLevelEnabled(level Level) bool { + return true +} + +func defaultCustomMultiLog(lvl []Level, msg string, fields ...Field) { + for _, level := range lvl { + defaultLog(level, msg, fields...) 
+ } +} + +// shouldQuote returns true if val contains any characters that require quotations. +func shouldQuote(val string) bool { + for _, c := range val { + if !((c >= '0' && c <= '9') || + (c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + c == '-' || c == '.' || c == '_' || c == '/' || c == '@' || c == '^' || c == '+') { + return true + } + } + return false +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/global.go b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/global.go new file mode 100644 index 00000000..71d7430a --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/global.go @@ -0,0 +1,129 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package mlog + +import ( + "sync" +) + +var ( + globalLogger *Logger + muxGlobalLogger sync.RWMutex +) + +func InitGlobalLogger(logger *Logger) { + muxGlobalLogger.Lock() + defer muxGlobalLogger.Unlock() + + globalLogger = logger +} + +func getGlobalLogger() *Logger { + muxGlobalLogger.RLock() + defer muxGlobalLogger.RUnlock() + + return globalLogger +} + +// IsLevelEnabled returns true only if at least one log target is +// configured to emit the specified log level. Use this check when +// gathering the log info may be expensive. +// +// Note, transformations and serializations done via fields are already +// lazily evaluated and don't require this check beforehand. +func IsLevelEnabled(level Level) bool { + logger := getGlobalLogger() + if logger == nil { + return defaultIsLevelEnabled(level) + } + return logger.IsLevelEnabled(level) +} + +// Log emits the log record for any targets configured for the specified level. +func Log(level Level, msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultLog(level, msg, fields...) + return + } + logger.Log(level, msg, fields...) 
+} + +// LogM emits the log record for any targets configured for the specified levels. +// Equivalent to calling `Log` once for each level. +func LogM(levels []Level, msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultCustomMultiLog(levels, msg, fields...) + return + } + logger.LogM(levels, msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Trace` level. +func Trace(msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultLog(LvlTrace, msg, fields...) + return + } + logger.Trace(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Debug` level. +func Debug(msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultLog(LvlDebug, msg, fields...) + return + } + logger.Debug(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Info` level. +func Info(msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultLog(LvlInfo, msg, fields...) + return + } + logger.Info(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Warn` level. +func Warn(msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultLog(LvlWarn, msg, fields...) + return + } + logger.Warn(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Error` level. +func Error(msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultLog(LvlError, msg, fields...) + return + } + logger.Error(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Critical` level. +// DEPRECATED: Either use Error or Fatal. +// Critical level isn't added in mlog/levels.go:StdAll so calling this doesn't +// really work. For now we just call Fatal to atleast print something. +func Critical(msg string, fields ...Field) { + Fatal(msg, fields...) 
+} + +func Fatal(msg string, fields ...Field) { + logger := getGlobalLogger() + if logger == nil { + defaultLog(LvlFatal, msg, fields...) + return + } + logger.Fatal(msg, fields...) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/levels.go b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/levels.go new file mode 100644 index 00000000..3004ea0d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/levels.go @@ -0,0 +1,76 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package mlog + +import "github.com/mattermost/logr/v2" + +// Standard levels. +var ( + LvlPanic = logr.Panic // ID = 0 + LvlFatal = logr.Fatal // ID = 1 + LvlError = logr.Error // ID = 2 + LvlWarn = logr.Warn // ID = 3 + LvlInfo = logr.Info // ID = 4 + LvlDebug = logr.Debug // ID = 5 + LvlTrace = logr.Trace // ID = 6 + StdAll = []Level{LvlPanic, LvlFatal, LvlError, LvlWarn, LvlInfo, LvlDebug, LvlTrace, LvlStdLog} + // non-standard "critical" level + LvlCritical = Level{ID: 7, Name: "critical"} + // used by redirected standard logger + LvlStdLog = Level{ID: 10, Name: "stdlog"} + // used only by the logger + LvlLogError = Level{ID: 11, Name: "logerror", Stacktrace: true} +) + +// Register custom (discrete) levels here. +// !!!!! Custom ID's must be between 20 and 32,768 !!!!!! 
+var ( + // used by the audit system + LvlAuditAPI = Level{ID: 100, Name: "audit-api"} + LvlAuditContent = Level{ID: 101, Name: "audit-content"} + LvlAuditPerms = Level{ID: 102, Name: "audit-permissions"} + LvlAuditCLI = Level{ID: 103, Name: "audit-cli"} + + // used by Remote Cluster Service + LvlRemoteClusterServiceDebug = Level{ID: 130, Name: "RemoteClusterServiceDebug"} + LvlRemoteClusterServiceError = Level{ID: 131, Name: "RemoteClusterServiceError"} + LvlRemoteClusterServiceWarn = Level{ID: 132, Name: "RemoteClusterServiceWarn"} + + // used by LDAP sync job + LvlLDAPError = Level{ID: 140, Name: "LDAPError"} + LvlLDAPWarn = Level{ID: 141, Name: "LDAPWarn"} + LvlLDAPInfo = Level{ID: 142, Name: "LDAPInfo"} + LvlLDAPDebug = Level{ID: 143, Name: "LDAPDebug"} + LvlLDAPTrace = Level{ID: 144, Name: "LDAPTrace"} + + // used by Shared Channel Sync Service + LvlSharedChannelServiceDebug = Level{ID: 200, Name: "SharedChannelServiceDebug"} + LvlSharedChannelServiceError = Level{ID: 201, Name: "SharedChannelServiceError"} + LvlSharedChannelServiceWarn = Level{ID: 202, Name: "SharedChannelServiceWarn"} + LvlSharedChannelServiceMessagesInbound = Level{ID: 203, Name: "SharedChannelServiceMsgInbound"} + LvlSharedChannelServiceMessagesOutbound = Level{ID: 204, Name: "SharedChannelServiceMsgOutbound"} + + // used by Notification Service + LvlNotificationError = Level{ID: 300, Name: "NotificationError"} + LvlNotificationWarn = Level{ID: 301, Name: "NotificationWarn"} + LvlNotificationInfo = Level{ID: 302, Name: "NotificationInfo"} + LvlNotificationDebug = Level{ID: 303, Name: "NotificationDebug"} + LvlNotificationTrace = Level{ID: 304, Name: "NotificationTrace"} +) + +// Combinations for LogM (log multi). 
+var ( + MLvlAuditAll = []Level{LvlAuditAPI, LvlAuditContent, LvlAuditPerms, LvlAuditCLI} + + MlvlLDAPError = []Level{LvlError, LvlLDAPError} + MlvlLDAPWarn = []Level{LvlWarn, LvlLDAPWarn} + MlvlLDAPInfo = []Level{LvlInfo, LvlLDAPInfo} + MlvlLDAPDebug = []Level{LvlDebug, LvlLDAPDebug} + + MlvlNotificationError = []Level{LvlError, LvlNotificationError} + MlvlNotificationWarn = []Level{LvlWarn, LvlNotificationWarn} + MlvlNotificationInfo = []Level{LvlInfo, LvlNotificationInfo} + MlvlNotificationDebug = []Level{LvlDebug, LvlNotificationDebug} + MlvlNotificationTrace = []Level{LvlTrace, LvlNotificationTrace} +) diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/metrics.go b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/metrics.go new file mode 100644 index 00000000..a98dcc24 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/metrics.go @@ -0,0 +1,11 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package mlog + +import ( + "github.com/mattermost/logr/v2" +) + +type Gauge = logr.Gauge +type Counter = logr.Counter diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/mlog.go b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/mlog.go new file mode 100644 index 00000000..6c10a60b --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/mlog.go @@ -0,0 +1,465 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +// Package mlog provides a simple wrapper around Logr. 
+package mlog + +import ( + "context" + "encoding/json" + "fmt" + "io" + "log" + "maps" + "os" + "strings" + "sync/atomic" + "time" + + "github.com/pkg/errors" + + "github.com/mattermost/logr/v2" + logrcfg "github.com/mattermost/logr/v2/config" +) + +const ( + ShutdownTimeout = time.Second * 15 + FlushTimeout = time.Second * 15 + DefaultMaxQueueSize = 1000 + DefaultMetricsUpdateFreqMillis = 15000 +) + +// LoggerIFace should be abbreviated as `logger`. +type LoggerIFace interface { + IsLevelEnabled(Level) bool + Trace(string, ...Field) + Debug(string, ...Field) + Info(string, ...Field) + Warn(string, ...Field) + Error(string, ...Field) + Critical(string, ...Field) + Fatal(string, ...Field) + Log(Level, string, ...Field) + LogM([]Level, string, ...Field) + With(fields ...Field) *Logger + Flush() error + Sugar(fields ...Field) Sugar + StdLogger(level Level) *log.Logger +} + +// Type and function aliases from Logr to limit the spread of dependencies. +type Field = logr.Field +type Level = logr.Level +type Option = logr.Option +type Target = logr.Target +type TargetInfo = logr.TargetInfo +type LogRec = logr.LogRec +type LogCloner = logr.LogCloner +type MetricsCollector = logr.MetricsCollector +type TargetCfg = logrcfg.TargetCfg +type TargetFactory = logrcfg.TargetFactory +type FormatterFactory = logrcfg.FormatterFactory +type Factories = logrcfg.Factories +type Sugar = logr.Sugar + +// LoggerConfiguration is a map of LogTarget configurations. 
+type LoggerConfiguration map[string]TargetCfg + +func (lc LoggerConfiguration) Append(cfg LoggerConfiguration) { + maps.Copy(lc, cfg) +} + +func (lc LoggerConfiguration) IsValid() error { + logger, err := logr.New() + if err != nil { + return errors.Wrap(err, "failed to create logger") + } + defer logger.Shutdown() + + err = logrcfg.ConfigureTargets(logger, lc, nil) + if err != nil { + return errors.Wrap(err, "logger configuration is invalid") + } + + return nil +} + +func (lc LoggerConfiguration) toTargetCfg() map[string]logrcfg.TargetCfg { + tcfg := make(map[string]logrcfg.TargetCfg) + maps.Copy(tcfg, lc) + return tcfg +} + +// Any picks the best supported field type based on type of val. +// For best performance when passing a struct (or struct pointer), +// implement `logr.LogWriter` on the struct, otherwise reflection +// will be used to generate a string representation. +var Any = logr.Any + +// Int constructs a field containing a key and int value. +func Int[T ~int | ~int8 | ~int16 | ~int32 | ~int64](key string, val T) Field { + return logr.Int[T](key, val) +} + +// Uint constructs a field containing a key and uint value. +func Uint[T ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr](key string, val T) Field { + return logr.Uint[T](key, val) +} + +// Float constructs a field containing a key and float value. +func Float[T ~float32 | ~float64](key string, val T) Field { + return logr.Float[T](key, val) +} + +// String constructs a field containing a key and string value. +func String[T ~string | ~[]byte](key string, val T) Field { + return logr.String[T](key, val) +} + +// Stringer constructs a field containing a key and a fmt.Stringer value. +// The fmt.Stringer's `String` method is called lazily. 
+var Stringer = func(key string, s fmt.Stringer) logr.Field { + if s == nil { + return Field{Key: key, Type: logr.StringType, String: ""} + } + return Field{Key: key, Type: logr.StringType, String: s.String()} +} + +// Err constructs a field containing a default key ("error") and error value. +var Err = func(err error) logr.Field { + return NamedErr("error", err) +} + +// NamedErr constructs a field containing a key and error value. +var NamedErr = func(key string, err error) logr.Field { + if err == nil { + return Field{Key: key, Type: logr.StringType, String: ""} + } + return Field{Key: key, Type: logr.StringType, String: err.Error()} +} + +// Bool constructs a field containing a key and bool value. +func Bool[T ~bool](key string, val T) Field { + return logr.Bool[T](key, val) +} + +// Time constructs a field containing a key and time.Time value. +var Time = logr.Time + +// Duration constructs a field containing a key and time.Duration value. +var Duration = logr.Duration + +// Millis constructs a field containing a key and timestamp value. +// The timestamp is expected to be milliseconds since Jan 1, 1970 UTC. +var Millis = logr.Millis + +// Array constructs a field containing a key and array value. +func Array[S ~[]E, E any](key string, val S) Field { + return logr.Array[S](key, val) +} + +// Map constructs a field containing a key and map value. +func Map[M ~map[K]V, K comparable, V any](key string, val M) Field { + return logr.Map[M](key, val) +} + +// Logger provides a thin wrapper around a Logr instance. This is a struct instead of an interface +// so that there are no allocations on the heap each interface method invocation. Normally not +// something to be concerned about, but logging calls for disabled levels should have as little CPU +// and memory impact as possible. Most of these wrapper calls will be inlined as well. +// +// Logger should be abbreviated as `logger`. 
+type Logger struct { + log *logr.Logger + lockConfig *int32 +} + +// NewLogger creates a new Logger instance which can be configured via `(*Logger).Configure`. +// Some options with invalid values can cause an error to be returned, however `NewLogger()` +// using just defaults never errors. +func NewLogger(options ...Option) (*Logger, error) { + options = append(options, logr.StackFilter(logr.GetPackageName("NewLogger"))) + + lgr, err := logr.New(options...) + if err != nil { + return nil, err + } + + log := lgr.NewLogger() + var lockConfig int32 + + return &Logger{ + log: &log, + lockConfig: &lockConfig, + }, nil +} + +// Configure provides a new configuration for this logger. +// Zero or more sources of config can be provided: +// +// cfgFile - path to file containing JSON +// cfgEscaped - JSON string probably from ENV var +// +// For each case JSON containing log targets is provided. Target name collisions are resolved +// using the following precedence: +// +// cfgFile > cfgEscaped +// +// An optional set of factories can be provided which will be called to create any target +// types or formatters not built-in. 
+func (l *Logger) Configure(cfgFile string, cfgEscaped string, factories *Factories) error { + if atomic.LoadInt32(l.lockConfig) != 0 { + return ErrConfigurationLock + } + + cfgMap := make(LoggerConfiguration) + + // Add config from file + if cfgFile != "" { + b, err := os.ReadFile(cfgFile) + if err != nil { + return fmt.Errorf("error reading logger config file %s: %w", cfgFile, err) + } + + var mapCfgFile LoggerConfiguration + if err := json.Unmarshal(b, &mapCfgFile); err != nil { + return fmt.Errorf("error decoding logger config file %s: %w", cfgFile, err) + } + cfgMap.Append(mapCfgFile) + } + + // Add config from escaped json string + if cfgEscaped != "" { + var mapCfgEscaped LoggerConfiguration + if err := json.Unmarshal([]byte(cfgEscaped), &mapCfgEscaped); err != nil { + return fmt.Errorf("error decoding logger config as escaped json: %w", err) + } + cfgMap.Append(mapCfgEscaped) + } + + if len(cfgMap) == 0 { + return nil + } + + return logrcfg.ConfigureTargets(l.log.Logr(), cfgMap.toTargetCfg(), factories) +} + +// ConfigureTargets provides a new configuration for this logger via a `LoggerConfig` map. +// `Logger.Configure` can be used instead which accepts JSON formatted configuration. +// An optional set of factories can be provided which will be called to create any target +// types or formatters not built-in. +func (l *Logger) ConfigureTargets(cfg LoggerConfiguration, factories *Factories) error { + if atomic.LoadInt32(l.lockConfig) != 0 { + return ErrConfigurationLock + } + return logrcfg.ConfigureTargets(l.log.Logr(), cfg.toTargetCfg(), factories) +} + +// LockConfiguration disallows further configuration changes until `UnlockConfiguration` +// is called. The previous locked stated is returned. +func (l *Logger) LockConfiguration() bool { + old := atomic.SwapInt32(l.lockConfig, 1) + return old != 0 +} + +// UnlockConfiguration allows configuration changes. The previous locked stated is returned. 
+func (l *Logger) UnlockConfiguration() bool { + old := atomic.SwapInt32(l.lockConfig, 0) + return old != 0 +} + +// IsConfigurationLocked returns the current state of the configuration lock. +func (l *Logger) IsConfigurationLocked() bool { + return atomic.LoadInt32(l.lockConfig) != 0 +} + +// With creates a new Logger with the specified fields. This is a light-weight +// operation and can be called on demand. +func (l *Logger) With(fields ...Field) *Logger { + logWith := l.log.With(fields...) + return &Logger{ + log: &logWith, + lockConfig: l.lockConfig, + } +} + +// IsLevelEnabled returns true only if at least one log target is +// configured to emit the specified log level. Use this check when +// gathering the log info may be expensive. +// +// Note, transformations and serializations done via fields are already +// lazily evaluated and don't require this check beforehand. +func (l *Logger) IsLevelEnabled(level Level) bool { + return l.log.IsLevelEnabled(level) +} + +// Log emits the log record for any targets configured for the specified level. +func (l *Logger) Log(level Level, msg string, fields ...Field) { + l.log.Log(level, msg, fields...) +} + +// LogM emits the log record for any targets configured for the specified levels. +// Equivalent to calling `Log` once for each level. +func (l *Logger) LogM(levels []Level, msg string, fields ...Field) { + l.log.LogM(levels, msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Trace` level. +func (l *Logger) Trace(msg string, fields ...Field) { + l.log.Trace(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Debug` level. +func (l *Logger) Debug(msg string, fields ...Field) { + l.log.Debug(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Info` level. +func (l *Logger) Info(msg string, fields ...Field) { + l.log.Info(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Warn` level. 
+func (l *Logger) Warn(msg string, fields ...Field) { + l.log.Warn(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Error` level. +func (l *Logger) Error(msg string, fields ...Field) { + l.log.Error(msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Critical` level. +func (l *Logger) Critical(msg string, fields ...Field) { + l.log.Log(LvlCritical, msg, fields...) +} + +// Convenience method equivalent to calling `Log` with the `Fatal` level, +// followed by `os.Exit(1)`. +func (l *Logger) Fatal(msg string, fields ...Field) { + l.log.Log(logr.Fatal, msg, fields...) + _ = l.Shutdown() + os.Exit(1) +} + +// HasTargets returns true if at least one log target has been added. +func (l *Logger) HasTargets() bool { + return l.log.Logr().HasTargets() +} + +// StdLogger creates a standard logger backed by this logger. +// All log records are output with the specified level. +func (l *Logger) StdLogger(level Level) *log.Logger { + return l.log.StdLogger(level) +} + +// StdLogWriter returns a writer that can be hooked up to the output of a golang standard logger +// anything written will be interpreted as log entries and passed to this logger. +func (l *Logger) StdLogWriter() io.Writer { + return &logWriter{ + logger: l, + } +} + +// RedirectStdLog redirects output from the standard library's package-global logger +// to this logger at the specified level and with zero or more Field's. Since this logger already +// handles caller annotations, timestamps, etc., it automatically disables the standard +// library's annotations and prefixing. +// A function is returned that restores the original prefix and flags and resets the standard +// library's output to os.Stdout. +func (l *Logger) RedirectStdLog(level Level, fields ...Field) func() { + return l.log.Logr().RedirectStdLog(level, fields...) +} + +// RemoveTargets safely removes one or more targets based on the filtering method. 
+// `f` should return true to delete the target, false to keep it. +// When removing a target, best effort is made to write any queued log records before +// closing, with ctx determining how much time can be spent in total. +// Note, keep the timeout short since this method blocks certain logging operations. +func (l *Logger) RemoveTargets(ctx context.Context, f func(ti TargetInfo) bool) error { + return l.log.Logr().RemoveTargets(ctx, f) +} + +// SetMetricsCollector sets (or resets) the metrics collector to be used for gathering +// metrics for all targets. Only targets added after this call will use the collector. +// +// To ensure all targets use a collector, use the `SetMetricsCollector` option when +// creating the Logger instead, or configure/reconfigure the Logger after calling this method. +func (l *Logger) SetMetricsCollector(collector MetricsCollector, updateFrequencyMillis int64) { + l.log.Logr().SetMetricsCollector(collector, updateFrequencyMillis) +} + +// Sugar creates a new `Logger` with a less structured API. Any fields are preserved. +func (l *Logger) Sugar(fields ...Field) Sugar { + return l.log.Sugar(fields...) +} + +// Flush forces all targets to write out any queued log records with a default timeout. +func (l *Logger) Flush() error { + ctx, cancel := context.WithTimeout(context.Background(), FlushTimeout) + defer cancel() + return l.log.Logr().FlushWithTimeout(ctx) +} + +// Flush forces all targets to write out any queued log records with the specified timeout. +func (l *Logger) FlushWithTimeout(ctx context.Context) error { + return l.log.Logr().FlushWithTimeout(ctx) +} + +// Shutdown shuts down the logger after making best efforts to flush any +// remaining records. +func (l *Logger) Shutdown() error { + ctx, cancel := context.WithTimeout(context.Background(), ShutdownTimeout) + defer cancel() + return l.log.Logr().ShutdownWithTimeout(ctx) +} + +// Shutdown shuts down the logger after making best efforts to flush any +// remaining records. 
+func (l *Logger) ShutdownWithTimeout(ctx context.Context) error { + return l.log.Logr().ShutdownWithTimeout(ctx) +} + +// GetPackageName reduces a fully qualified function name to the package name +// By sirupsen: https://github.com/sirupsen/logrus/blob/master/entry.go +func GetPackageName(f string) string { + for { + lastPeriod := strings.LastIndex(f, ".") + lastSlash := strings.LastIndex(f, "/") + if lastPeriod > lastSlash { + f = f[:lastPeriod] + } else { + break + } + } + return f +} + +// ShouldQuote returns true if val contains any characters that might be unsafe +// when injecting log output into an aggregator, viewer or report. +// Returning true means that val should be surrounded by quotation marks before being +// output into logs. +func ShouldQuote(val string) bool { + for _, c := range val { + if !((c >= '0' && c <= '9') || + (c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + c == '-' || c == '.' || c == '_' || c == '/' || c == '@' || c == '^' || c == '+') { + return true + } + } + return false +} + +type logWriter struct { + logger *Logger +} + +func (lw *logWriter) Write(p []byte) (int, error) { + lw.logger.Info(string(p)) + return len(p), nil +} + +// ErrConfigurationLock is returned when one of a logger's configuration APIs is called +// while the configuration is locked. +var ErrConfigurationLock = errors.New("configuration is locked") diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/options.go b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/options.go new file mode 100644 index 00000000..b6b60ec6 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/options.go @@ -0,0 +1,62 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package mlog + +import "github.com/mattermost/logr/v2" + +// MaxQueueSize is the maximum number of log records that can be queued. 
+// If exceeded, `OnQueueFull` is called which determines if the log +// record will be dropped or block until add is successful. +// Defaults to DefaultMaxQueueSize. +func MaxQueueSize(size int) Option { + return logr.MaxQueueSize(size) +} + +// OnLoggerError, when not nil, is called any time an internal +// logging error occurs. For example, this can happen when a +// target cannot connect to its data sink. +func OnLoggerError(f func(error)) Option { + return logr.OnLoggerError(f) +} + +// OnQueueFull, when not nil, is called on an attempt to add +// a log record to a full Logr queue. +// `MaxQueueSize` can be used to modify the maximum queue size. +// This function should return quickly, with a bool indicating whether +// the log record should be dropped (true) or block until the log record +// is successfully added (false). If nil then blocking (false) is assumed. +func OnQueueFull(f func(rec *LogRec, maxQueueSize int) bool) Option { + return logr.OnQueueFull(f) +} + +// OnTargetQueueFull, when not nil, is called on an attempt to add +// a log record to a full target queue provided the target supports reporting +// this condition. +// This function should return quickly, with a bool indicating whether +// the log record should be dropped (true) or block until the log record +// is successfully added (false). If nil then blocking (false) is assumed. +func OnTargetQueueFull(f func(target Target, rec *LogRec, maxQueueSize int) bool) Option { + return logr.OnTargetQueueFull(f) +} + +// SetMetricsCollector enables metrics collection by supplying a MetricsCollector. +// The MetricsCollector provides counters and gauges that are updated by log targets. +// `updateFreqMillis` determines how often polled metrics are updated. Defaults to 15000 (15 seconds) +// and must be at least 250 so we don't peg the CPU. 
+func SetMetricsCollector(collector MetricsCollector, updateFreqMillis int64) Option { + return logr.SetMetricsCollector(collector, updateFreqMillis) +} + +// StackFilter provides a list of package names to exclude from the top of +// stack traces. The Logr packages are automatically filtered. +func StackFilter(pkg ...string) Option { + return logr.StackFilter(pkg...) +} + +// MaxFieldLen is the maximum number of characters for a field. +// If exceeded, remaining bytes will be discarded. +// Defaults to DefaultMaxFieldLength. +func MaxFieldLen(size int) Option { + return logr.MaxFieldLen(size) +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/tlog.go b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/tlog.go new file mode 100644 index 00000000..ee598265 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/mlog/tlog.go @@ -0,0 +1,145 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package mlog + +import ( + "bytes" + "io" + "os" + "sync" + "testing" + + "github.com/mattermost/logr/v2" + "github.com/mattermost/logr/v2/formatters" + "github.com/mattermost/logr/v2/targets" +) + +// AddWriterTarget adds a simple io.Writer target to an existing Logger. +// The `io.Writer` can be a buffer which is useful for testing. +// When adding a buffer to collect logs make sure to use `mlog.Buffer` which is +// a thread safe version of `bytes.Buffer`. +func AddWriterTarget(logger *Logger, w io.Writer, useJSON bool, levels ...Level) error { + filter := logr.NewCustomFilter(levels...) + + var formatter logr.Formatter + if useJSON { + formatter = &formatters.JSON{EnableCaller: true} + } else { + formatter = &formatters.Plain{EnableCaller: true} + } + + target := targets.NewWriterTarget(w) + return logger.log.Logr().AddTarget(target, "_testWriter", filter, formatter, 1000) +} + +// CreateConsole createa a logger that outputs to [os.Stdout]. 
+// It's useful in places where no log configuration is accessible. +func CreateConsoleLogger() *Logger { + logger, err := NewLogger() + if err != nil { + panic("failed create logger " + err.Error()) + } + + filter := logr.StdFilter{ + Lvl: LvlTrace, + Stacktrace: LvlPanic, + } + formatter := &formatters.Plain{ + EnableCaller: true, + EnableColor: true, + } + + target := targets.NewWriterTarget(os.Stdout) + if err := logger.log.Logr().AddTarget(target, "_testcon", filter, formatter, 1000); err != nil { + panic("failed to add target " + err.Error()) + } + + return logger +} + +// CreateConsoleTestLogger creates a logger for unit tests. Log records are output to `os.Stdout`. +// All log messages with level trace or lower are logged. +// The returned logger get Shutdown() when the tests completes. The caller should not shut it down. +func CreateConsoleTestLogger(tb testing.TB) *Logger { + tb.Helper() + + logger, err := NewLogger() + if err != nil { + tb.Fatalf("failed create logger %v", err) + } + + filter := logr.StdFilter{ + Lvl: LvlTrace, + Stacktrace: LvlPanic, + } + formatter := &formatters.Plain{EnableCaller: true} + + target := targets.NewWriterTarget(os.Stdout) + if err := logger.log.Logr().AddTarget(target, "_testcon", filter, formatter, 1000); err != nil { + tb.Fatalf("failed to add target %v", err) + } + + tb.Cleanup(func() { + err := logger.Shutdown() + if err != nil { + tb.Fatalf("failed to shut down test logger %v", err) + } + }) + + return logger +} + +// CreateTestLogger creates a logger for unit tests. Log records are output via `t.Log`. +// All log messages with level trace or lower are logged. +// The returned logger get Shutdown() when the tests completes. The caller should not shut it down. 
+func CreateTestLogger(t *testing.T) *Logger { + t.Helper() + + logger, err := NewLogger() + if err != nil { + t.Fatalf("failed create logger %v", err) + } + + filter := logr.StdFilter{ + Lvl: LvlTrace, + Stacktrace: LvlPanic, + } + formatter := &formatters.Plain{EnableCaller: true} + target := targets.NewTestingTarget(t) + + if err := logger.log.Logr().AddTarget(target, "test", filter, formatter, 1000); err != nil { + t.Fatalf("failed to add target %v", err) + } + + t.Cleanup(func() { + err := logger.Shutdown() + if err != nil { + t.Errorf("failed to shut down test logger %v", err) + } + }) + + return logger +} + +// Buffer provides a thread-safe buffer useful for logging to memory in unit tests. +type Buffer struct { + buf bytes.Buffer + mux sync.Mutex +} + +func (b *Buffer) Read(p []byte) (n int, err error) { + b.mux.Lock() + defer b.mux.Unlock() + return b.buf.Read(p) +} +func (b *Buffer) Write(p []byte) (n int, err error) { + b.mux.Lock() + defer b.mux.Unlock() + return b.buf.Write(p) +} +func (b *Buffer) String() string { + b.mux.Lock() + defer b.mux.Unlock() + return b.buf.String() +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/request/context.go b/vendor/github.com/mattermost/mattermost/server/public/shared/request/context.go new file mode 100644 index 00000000..2450b1aa --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/request/context.go @@ -0,0 +1,199 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package request + +import ( + "context" + "testing" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/i18n" + "github.com/mattermost/mattermost/server/public/shared/mlog" +) + +// Context should be abbreviated as `rctx`. 
+type Context struct { + t i18n.TranslateFunc + session model.Session + requestId string + ipAddress string + xForwardedFor string + path string + userAgent string + acceptLanguage string + logger mlog.LoggerIFace + context context.Context +} + +func NewContext(ctx context.Context, requestId, ipAddress, xForwardedFor, path, userAgent, acceptLanguage string, t i18n.TranslateFunc) *Context { + return &Context{ + t: t, + requestId: requestId, + ipAddress: ipAddress, + xForwardedFor: xForwardedFor, + path: path, + userAgent: userAgent, + acceptLanguage: acceptLanguage, + context: ctx, + } +} + +func EmptyContext(logger mlog.LoggerIFace) *Context { + return &Context{ + t: i18n.T, + logger: logger, + context: context.Background(), + } +} + +// TestContext creates an empty context with a new logger to use in testing where a test helper is +// not required. +func TestContext(tb testing.TB) *Context { + logger := mlog.CreateConsoleTestLogger(tb) + return EmptyContext(logger) +} + +// clone creates a shallow copy of Context, allowing clones to apply per-request changes. +func (c *Context) clone() *Context { + cCopy := *c + return &cCopy +} + +func (c *Context) T(translationID string, args ...any) string { + return c.t(translationID, args...) 
+} + +func (c *Context) GetT() i18n.TranslateFunc { + return c.t +} + +func (c *Context) Session() *model.Session { + return &c.session +} + +func (c *Context) RequestId() string { + return c.requestId +} + +func (c *Context) IPAddress() string { + return c.ipAddress +} + +func (c *Context) XForwardedFor() string { + return c.xForwardedFor +} + +func (c *Context) Path() string { + return c.path +} + +func (c *Context) UserAgent() string { + return c.userAgent +} + +func (c *Context) AcceptLanguage() string { + return c.acceptLanguage +} + +func (c *Context) Logger() mlog.LoggerIFace { + return c.logger +} + +func (c *Context) Context() context.Context { + return c.context +} + +func (c *Context) WithT(t i18n.TranslateFunc) CTX { + rctx := c.clone() + rctx.t = t + return rctx +} + +func (c *Context) WithSession(s *model.Session) CTX { + rctx := c.clone() + if s == nil { + rctx.session = model.Session{} + } else { + rctx.session = *s + } + return rctx +} + +func (c *Context) WithRequestId(s string) CTX { + rctx := c.clone() + rctx.requestId = s + return rctx +} + +func (c *Context) WithIPAddress(s string) CTX { + rctx := c.clone() + rctx.ipAddress = s + return rctx +} + +func (c *Context) WithXForwardedFor(s string) CTX { + rctx := c.clone() + rctx.xForwardedFor = s + return rctx +} + +func (c *Context) WithPath(s string) CTX { + rctx := c.clone() + rctx.path = s + return rctx +} + +func (c *Context) WithUserAgent(s string) CTX { + rctx := c.clone() + rctx.userAgent = s + return rctx +} + +func (c *Context) WithAcceptLanguage(s string) CTX { + rctx := c.clone() + rctx.acceptLanguage = s + return rctx +} + +func (c *Context) WithContext(ctx context.Context) CTX { + rctx := c.clone() + rctx.context = ctx + return rctx +} + +func (c *Context) WithLogger(logger mlog.LoggerIFace) CTX { + rctx := c.clone() + rctx.logger = logger + return rctx +} + +func (c *Context) With(f func(ctx CTX) CTX) CTX { + return f(c) +} + +// CTX should be abbreviated as `rctx`. 
+type CTX interface { + T(string, ...any) string + GetT() i18n.TranslateFunc + Session() *model.Session + RequestId() string + IPAddress() string + XForwardedFor() string + Path() string + UserAgent() string + AcceptLanguage() string + Logger() mlog.LoggerIFace + Context() context.Context + WithT(i18n.TranslateFunc) CTX + WithSession(s *model.Session) CTX + WithRequestId(string) CTX + WithIPAddress(string) CTX + WithXForwardedFor(string) CTX + WithPath(string) CTX + WithUserAgent(string) CTX + WithAcceptLanguage(string) CTX + WithLogger(mlog.LoggerIFace) CTX + WithContext(ctx context.Context) CTX + With(func(ctx CTX) CTX) CTX +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/timezones/default.go b/vendor/github.com/mattermost/mattermost/server/public/shared/timezones/default.go new file mode 100644 index 00000000..3d835f7d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/timezones/default.go @@ -0,0 +1,599 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. 
+ +package timezones + +var DefaultSupportedTimezones = []string{ + "Africa/Abidjan", + "Africa/Accra", + "Africa/Addis_Ababa", + "Africa/Algiers", + "Africa/Asmara", + "Africa/Asmera", + "Africa/Bamako", + "Africa/Bangui", + "Africa/Banjul", + "Africa/Bissau", + "Africa/Blantyre", + "Africa/Brazzaville", + "Africa/Bujumbura", + "Africa/Cairo", + "Africa/Casablanca", + "Africa/Ceuta", + "Africa/Conakry", + "Africa/Dakar", + "Africa/Dar_es_Salaam", + "Africa/Djibouti", + "Africa/Douala", + "Africa/El_Aaiun", + "Africa/Freetown", + "Africa/Gaborone", + "Africa/Harare", + "Africa/Johannesburg", + "Africa/Juba", + "Africa/Kampala", + "Africa/Khartoum", + "Africa/Kigali", + "Africa/Kinshasa", + "Africa/Lagos", + "Africa/Libreville", + "Africa/Lome", + "Africa/Luanda", + "Africa/Lubumbashi", + "Africa/Lusaka", + "Africa/Malabo", + "Africa/Maputo", + "Africa/Maseru", + "Africa/Mbabane", + "Africa/Mogadishu", + "Africa/Monrovia", + "Africa/Nairobi", + "Africa/Ndjamena", + "Africa/Niamey", + "Africa/Nouakchott", + "Africa/Ouagadougou", + "Africa/Porto-Novo", + "Africa/Sao_Tome", + "Africa/Timbuktu", + "Africa/Tripoli", + "Africa/Tunis", + "Africa/Windhoek", + "America/Adak", + "America/Anchorage", + "America/Anguilla", + "America/Antigua", + "America/Araguaina", + "America/Argentina/Buenos_Aires", + "America/Argentina/Catamarca", + "America/Argentina/ComodRivadavia", + "America/Argentina/Cordoba", + "America/Argentina/Jujuy", + "America/Argentina/La_Rioja", + "America/Argentina/Mendoza", + "America/Argentina/Rio_Gallegos", + "America/Argentina/Salta", + "America/Argentina/San_Juan", + "America/Argentina/San_Luis", + "America/Argentina/Tucuman", + "America/Argentina/Ushuaia", + "America/Aruba", + "America/Asuncion", + "America/Atikokan", + "America/Atka", + "America/Bahia", + "America/Bahia_Banderas", + "America/Barbados", + "America/Belem", + "America/Belize", + "America/Blanc-Sablon", + "America/Boa_Vista", + "America/Bogota", + "America/Boise", + "America/Buenos_Aires", + 
"America/Cambridge_Bay", + "America/Campo_Grande", + "America/Cancun", + "America/Caracas", + "America/Catamarca", + "America/Cayenne", + "America/Cayman", + "America/Chicago", + "America/Chihuahua", + "America/Coral_Harbour", + "America/Cordoba", + "America/Costa_Rica", + "America/Creston", + "America/Cuiaba", + "America/Curacao", + "America/Danmarkshavn", + "America/Dawson", + "America/Dawson_Creek", + "America/Denver", + "America/Detroit", + "America/Dominica", + "America/Edmonton", + "America/Eirunepe", + "America/El_Salvador", + "America/Ensenada", + "America/Fort_Nelson", + "America/Fort_Wayne", + "America/Fortaleza", + "America/Glace_Bay", + "America/Godthab", + "America/Goose_Bay", + "America/Grand_Turk", + "America/Grenada", + "America/Guadeloupe", + "America/Guatemala", + "America/Guayaquil", + "America/Guyana", + "America/Halifax", + "America/Havana", + "America/Hermosillo", + "America/Indiana/Indianapolis", + "America/Indiana/Knox", + "America/Indiana/Marengo", + "America/Indiana/Petersburg", + "America/Indiana/Tell_City", + "America/Indiana/Vevay", + "America/Indiana/Vincennes", + "America/Indiana/Winamac", + "America/Indianapolis", + "America/Inuvik", + "America/Iqaluit", + "America/Jamaica", + "America/Jujuy", + "America/Juneau", + "America/Kentucky/Louisville", + "America/Kentucky/Monticello", + "America/Knox_IN", + "America/Kralendijk", + "America/La_Paz", + "America/Lima", + "America/Los_Angeles", + "America/Louisville", + "America/Lower_Princes", + "America/Maceio", + "America/Managua", + "America/Manaus", + "America/Marigot", + "America/Martinique", + "America/Matamoros", + "America/Mazatlan", + "America/Mendoza", + "America/Menominee", + "America/Merida", + "America/Metlakatla", + "America/Mexico_City", + "America/Miquelon", + "America/Moncton", + "America/Monterrey", + "America/Montevideo", + "America/Montreal", + "America/Montserrat", + "America/Nassau", + "America/New_York", + "America/Nipigon", + "America/Nome", + "America/Noronha", + 
"America/North_Dakota/Beulah", + "America/North_Dakota/Center", + "America/North_Dakota/New_Salem", + "America/Ojinaga", + "America/Panama", + "America/Pangnirtung", + "America/Paramaribo", + "America/Phoenix", + "America/Port-au-Prince", + "America/Port_of_Spain", + "America/Porto_Acre", + "America/Porto_Velho", + "America/Puerto_Rico", + "America/Punta_Arenas", + "America/Rainy_River", + "America/Rankin_Inlet", + "America/Recife", + "America/Regina", + "America/Resolute", + "America/Rio_Branco", + "America/Rosario", + "America/Santa_Isabel", + "America/Santarem", + "America/Santiago", + "America/Santo_Domingo", + "America/Sao_Paulo", + "America/Scoresbysund", + "America/Shiprock", + "America/Sitka", + "America/St_Barthelemy", + "America/St_Johns", + "America/St_Kitts", + "America/St_Lucia", + "America/St_Thomas", + "America/St_Vincent", + "America/Swift_Current", + "America/Tegucigalpa", + "America/Thule", + "America/Thunder_Bay", + "America/Tijuana", + "America/Toronto", + "America/Tortola", + "America/Vancouver", + "America/Virgin", + "America/Whitehorse", + "America/Winnipeg", + "America/Yakutat", + "America/Yellowknife", + "Antarctica/Casey", + "Antarctica/Davis", + "Antarctica/DumontDUrville", + "Antarctica/Macquarie", + "Antarctica/Mawson", + "Antarctica/McMurdo", + "Antarctica/Palmer", + "Antarctica/Rothera", + "Antarctica/South_Pole", + "Antarctica/Syowa", + "Antarctica/Troll", + "Antarctica/Vostok", + "Arctic/Longyearbyen", + "Asia/Aden", + "Asia/Almaty", + "Asia/Amman", + "Asia/Anadyr", + "Asia/Aqtau", + "Asia/Aqtobe", + "Asia/Ashgabat", + "Asia/Ashkhabad", + "Asia/Atyrau", + "Asia/Baghdad", + "Asia/Bahrain", + "Asia/Baku", + "Asia/Bangkok", + "Asia/Barnaul", + "Asia/Beirut", + "Asia/Bishkek", + "Asia/Brunei", + "Asia/Calcutta", + "Asia/Chita", + "Asia/Choibalsan", + "Asia/Chongqing", + "Asia/Chungking", + "Asia/Colombo", + "Asia/Dacca", + "Asia/Damascus", + "Asia/Dhaka", + "Asia/Dili", + "Asia/Dubai", + "Asia/Dushanbe", + "Asia/Famagusta", + 
"Asia/Gaza", + "Asia/Harbin", + "Asia/Hebron", + "Asia/Ho_Chi_Minh", + "Asia/Hong_Kong", + "Asia/Hovd", + "Asia/Irkutsk", + "Asia/Istanbul", + "Asia/Jakarta", + "Asia/Jayapura", + "Asia/Jerusalem", + "Asia/Kabul", + "Asia/Kamchatka", + "Asia/Karachi", + "Asia/Kashgar", + "Asia/Kathmandu", + "Asia/Katmandu", + "Asia/Khandyga", + "Asia/Kolkata", + "Asia/Krasnoyarsk", + "Asia/Kuala_Lumpur", + "Asia/Kuching", + "Asia/Kuwait", + "Asia/Macao", + "Asia/Macau", + "Asia/Magadan", + "Asia/Makassar", + "Asia/Manila", + "Asia/Muscat", + "Asia/Nicosia", + "Asia/Novokuznetsk", + "Asia/Novosibirsk", + "Asia/Omsk", + "Asia/Oral", + "Asia/Phnom_Penh", + "Asia/Pontianak", + "Asia/Pyongyang", + "Asia/Qatar", + "Asia/Qyzylorda", + "Asia/Rangoon", + "Asia/Riyadh", + "Asia/Saigon", + "Asia/Sakhalin", + "Asia/Samarkand", + "Asia/Seoul", + "Asia/Shanghai", + "Asia/Singapore", + "Asia/Srednekolymsk", + "Asia/Taipei", + "Asia/Tashkent", + "Asia/Tbilisi", + "Asia/Tehran", + "Asia/Tel_Aviv", + "Asia/Thimbu", + "Asia/Thimphu", + "Asia/Tokyo", + "Asia/Tomsk", + "Asia/Ujung_Pandang", + "Asia/Ulaanbaatar", + "Asia/Ulan_Bator", + "Asia/Urumqi", + "Asia/Ust-Nera", + "Asia/Vientiane", + "Asia/Vladivostok", + "Asia/Yakutsk", + "Asia/Yangon", + "Asia/Yekaterinburg", + "Asia/Yerevan", + "Atlantic/Azores", + "Atlantic/Bermuda", + "Atlantic/Canary", + "Atlantic/Cape_Verde", + "Atlantic/Faeroe", + "Atlantic/Faroe", + "Atlantic/Jan_Mayen", + "Atlantic/Madeira", + "Atlantic/Reykjavik", + "Atlantic/South_Georgia", + "Atlantic/St_Helena", + "Atlantic/Stanley", + "Australia/ACT", + "Australia/Adelaide", + "Australia/Brisbane", + "Australia/Broken_Hill", + "Australia/Canberra", + "Australia/Currie", + "Australia/Darwin", + "Australia/Eucla", + "Australia/Hobart", + "Australia/LHI", + "Australia/Lindeman", + "Australia/Lord_Howe", + "Australia/Melbourne", + "Australia/NSW", + "Australia/North", + "Australia/Perth", + "Australia/Queensland", + "Australia/South", + "Australia/Sydney", + "Australia/Tasmania", + 
"Australia/Victoria", + "Australia/West", + "Australia/Yancowinna", + "Brazil/Acre", + "Brazil/DeNoronha", + "Brazil/East", + "Brazil/West", + "CET", + "CST6CDT", + "Canada/Atlantic", + "Canada/Central", + "Canada/Eastern", + "Canada/Mountain", + "Canada/Newfoundland", + "Canada/Pacific", + "Canada/Saskatchewan", + "Canada/Yukon", + "Chile/Continental", + "Chile/EasterIsland", + "Cuba", + "EET", + "EST", + "EST5EDT", + "Egypt", + "Eire", + "Etc/GMT", + "Etc/GMT+0", + "Etc/GMT+1", + "Etc/GMT+10", + "Etc/GMT+11", + "Etc/GMT+12", + "Etc/GMT+2", + "Etc/GMT+3", + "Etc/GMT+4", + "Etc/GMT+5", + "Etc/GMT+6", + "Etc/GMT+7", + "Etc/GMT+8", + "Etc/GMT+9", + "Etc/GMT-0", + "Etc/GMT-1", + "Etc/GMT-10", + "Etc/GMT-11", + "Etc/GMT-12", + "Etc/GMT-13", + "Etc/GMT-14", + "Etc/GMT-2", + "Etc/GMT-3", + "Etc/GMT-4", + "Etc/GMT-5", + "Etc/GMT-6", + "Etc/GMT-7", + "Etc/GMT-8", + "Etc/GMT-9", + "Etc/GMT0", + "Etc/Greenwich", + "Etc/UCT", + "Etc/UTC", + "Etc/Universal", + "Etc/Zulu", + "Europe/Amsterdam", + "Europe/Andorra", + "Europe/Astrakhan", + "Europe/Athens", + "Europe/Belfast", + "Europe/Belgrade", + "Europe/Berlin", + "Europe/Bratislava", + "Europe/Brussels", + "Europe/Bucharest", + "Europe/Budapest", + "Europe/Busingen", + "Europe/Chisinau", + "Europe/Copenhagen", + "Europe/Dublin", + "Europe/Gibraltar", + "Europe/Guernsey", + "Europe/Helsinki", + "Europe/Isle_of_Man", + "Europe/Istanbul", + "Europe/Jersey", + "Europe/Kaliningrad", + "Europe/Kiev", + "Europe/Kirov", + "Europe/Lisbon", + "Europe/Ljubljana", + "Europe/London", + "Europe/Luxembourg", + "Europe/Madrid", + "Europe/Malta", + "Europe/Mariehamn", + "Europe/Minsk", + "Europe/Monaco", + "Europe/Moscow", + "Europe/Nicosia", + "Europe/Oslo", + "Europe/Paris", + "Europe/Podgorica", + "Europe/Prague", + "Europe/Riga", + "Europe/Rome", + "Europe/Samara", + "Europe/San_Marino", + "Europe/Sarajevo", + "Europe/Saratov", + "Europe/Simferopol", + "Europe/Skopje", + "Europe/Sofia", + "Europe/Stockholm", + "Europe/Tallinn", + 
"Europe/Tirane", + "Europe/Tiraspol", + "Europe/Ulyanovsk", + "Europe/Uzhgorod", + "Europe/Vaduz", + "Europe/Vatican", + "Europe/Vienna", + "Europe/Vilnius", + "Europe/Volgograd", + "Europe/Warsaw", + "Europe/Zagreb", + "Europe/Zaporozhye", + "Europe/Zurich", + "GB", + "GB-Eire", + "GMT", + "GMT+0", + "GMT-0", + "GMT0", + "Greenwich", + "HST", + "Hongkong", + "Iceland", + "Indian/Antananarivo", + "Indian/Chagos", + "Indian/Christmas", + "Indian/Cocos", + "Indian/Comoro", + "Indian/Kerguelen", + "Indian/Mahe", + "Indian/Maldives", + "Indian/Mauritius", + "Indian/Mayotte", + "Indian/Reunion", + "Iran", + "Israel", + "Jamaica", + "Japan", + "Kwajalein", + "Libya", + "MET", + "MST", + "MST7MDT", + "Mexico/BajaNorte", + "Mexico/BajaSur", + "Mexico/General", + "NZ", + "NZ-CHAT", + "Navajo", + "PRC", + "PST8PDT", + "Pacific/Apia", + "Pacific/Auckland", + "Pacific/Bougainville", + "Pacific/Chatham", + "Pacific/Chuuk", + "Pacific/Easter", + "Pacific/Efate", + "Pacific/Enderbury", + "Pacific/Fakaofo", + "Pacific/Fiji", + "Pacific/Funafuti", + "Pacific/Galapagos", + "Pacific/Gambier", + "Pacific/Guadalcanal", + "Pacific/Guam", + "Pacific/Honolulu", + "Pacific/Johnston", + "Pacific/Kiritimati", + "Pacific/Kosrae", + "Pacific/Kwajalein", + "Pacific/Majuro", + "Pacific/Marquesas", + "Pacific/Midway", + "Pacific/Nauru", + "Pacific/Niue", + "Pacific/Norfolk", + "Pacific/Noumea", + "Pacific/Pago_Pago", + "Pacific/Palau", + "Pacific/Pitcairn", + "Pacific/Pohnpei", + "Pacific/Ponape", + "Pacific/Port_Moresby", + "Pacific/Rarotonga", + "Pacific/Saipan", + "Pacific/Samoa", + "Pacific/Tahiti", + "Pacific/Tarawa", + "Pacific/Tongatapu", + "Pacific/Truk", + "Pacific/Wake", + "Pacific/Wallis", + "Pacific/Yap", + "Poland", + "Portugal", + "ROC", + "ROK", + "Singapore", + "Turkey", + "UCT", + "US/Alaska", + "US/Aleutian", + "US/Arizona", + "US/Central", + "US/East-Indiana", + "US/Eastern", + "US/Hawaii", + "US/Indiana-Starke", + "US/Michigan", + "US/Mountain", + "US/Pacific", + 
"US/Pacific-New", + "US/Samoa", + "UTC", + "Universal", + "W-SU", + "WET", + "Zulu", +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/shared/timezones/timezones.go b/vendor/github.com/mattermost/mattermost/server/public/shared/timezones/timezones.go new file mode 100644 index 00000000..e211a4cd --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/shared/timezones/timezones.go @@ -0,0 +1,29 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package timezones + +type Timezones struct { + supportedZones []string +} + +func New() *Timezones { + timezones := Timezones{} + + timezones.supportedZones = DefaultSupportedTimezones + + return &timezones +} + +func (t *Timezones) GetSupported() []string { + return t.supportedZones +} + +func DefaultUserTimezone() map[string]string { + defaultTimezone := make(map[string]string) + defaultTimezone["useAutomaticTimezone"] = "true" + defaultTimezone["automaticTimezone"] = "" + defaultTimezone["manualTimezone"] = "" + + return defaultTimezone +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/utils/array.go b/vendor/github.com/mattermost/mattermost/server/public/utils/array.go new file mode 100644 index 00000000..54b9caa0 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/utils/array.go @@ -0,0 +1,45 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package utils + +// FindExclusives returns three arrays: +// 1. Items exclusive to arr1 +// 2. Items exclusive to arr2 +// 3. 
Items common to both arr1 and arr2 +func FindExclusives[T comparable](arr1, arr2 []T) ([]T, []T, []T) { + // Create maps to track the presence of elements in each array + existsInArr1 := make(map[T]bool) + existsInArr2 := make(map[T]bool) + + // Populate the maps with the elements from both arrays + for _, elem := range arr1 { + existsInArr1[elem] = true + } + for _, elem := range arr2 { + existsInArr2[elem] = true + } + + // Slices for results + var uniqueToArr1 []T + var uniqueToArr2 []T + var common []T + + // Find elements unique to arr1 and common elements + for elem := range existsInArr1 { + if existsInArr2[elem] { + common = append(common, elem) + } else { + uniqueToArr1 = append(uniqueToArr1, elem) + } + } + + // Find elements unique to arr2 + for elem := range existsInArr2 { + if !existsInArr1[elem] { + uniqueToArr2 = append(uniqueToArr2, elem) + } + } + + return uniqueToArr1, uniqueToArr2, common +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/utils/file.go b/vendor/github.com/mattermost/mattermost/server/public/utils/file.go new file mode 100644 index 00000000..04efd090 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/utils/file.go @@ -0,0 +1,124 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package utils + +import ( + "fmt" + "io" + "os" + "path/filepath" +) + +// CopyFile will copy a file from src path to dst path. +// Overwrites any existing files at dst. +// Permissions are copied from file at src to the new file at dst. 
+func CopyFile(src, dst string) (err error) { + in, err := os.Open(src) + if err != nil { + return + } + defer in.Close() + + if err = os.MkdirAll(filepath.Dir(dst), os.ModePerm); err != nil { + return + } + out, err := os.Create(dst) + if err != nil { + return + } + defer func() { + if e := out.Close(); e != nil { + err = e + } + }() + + _, err = io.Copy(out, in) + if err != nil { + return + } + + err = out.Sync() + if err != nil { + return + } + + stat, err := os.Stat(src) + if err != nil { + return + } + err = os.Chmod(dst, stat.Mode()) + if err != nil { + return + } + + return +} + +// CopyDir will copy a directory and all contained files and directories. +// src must exist and dst must not exist. +// Permissions are preserved when possible. Symlinks are skipped. +func CopyDir(src string, dst string) (err error) { + src, err = filepath.Abs(src) + if err != nil { + return + } + dst, err = filepath.Abs(dst) + if err != nil { + return + } + + stat, err := os.Stat(src) + if err != nil { + return + } + if !stat.IsDir() { + return fmt.Errorf("source must be a directory") + } + + _, err = os.Stat(dst) + if err != nil && !os.IsNotExist(err) { + return + } + if err == nil { + return fmt.Errorf("destination already exists") + } + + err = os.MkdirAll(dst, stat.Mode()) + if err != nil { + return + } + + items, err := os.ReadDir(src) + if err != nil { + return + } + + for _, item := range items { + srcPath := filepath.Join(src, item.Name()) + dstPath := filepath.Join(dst, item.Name()) + + if item.IsDir() { + err = CopyDir(srcPath, dstPath) + if err != nil { + return + } + } else { + info, ierr := item.Info() + if ierr != nil { + continue + } + + if info.Mode()&os.ModeSymlink != 0 { + continue + } + + err = CopyFile(srcPath, dstPath) + if err != nil { + return + } + } + } + + return +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/utils/fileutils.go b/vendor/github.com/mattermost/mattermost/server/public/utils/fileutils.go new file mode 100644 index 
00000000..55f7d59d --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/utils/fileutils.go @@ -0,0 +1,137 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package utils + +import ( + "os" + "path/filepath" + "regexp" + "strings" +) + +func CommonBaseSearchPaths() []string { + paths := []string{ + ".", + "..", + "../..", + "../../..", + "../../../..", + } + + return paths +} + +func findPath(path string, baseSearchPaths []string, workingDirFirst bool, filter func(os.FileInfo) bool) string { + if filepath.IsAbs(path) { + if _, err := os.Stat(path); err == nil { + return path + } + + return "" + } + + searchPaths := []string{} + if workingDirFirst { + searchPaths = append(searchPaths, baseSearchPaths...) + } + + // Attempt to search relative to the location of the running binary either before + // or after searching relative to the working directory, depending on `workingDirFirst`. + var binaryDir string + if exe, err := os.Executable(); err == nil { + if exe, err = filepath.EvalSymlinks(exe); err == nil { + if exe, err = filepath.Abs(exe); err == nil { + binaryDir = filepath.Dir(exe) + } + } + } + if binaryDir != "" { + for _, baseSearchPath := range baseSearchPaths { + searchPaths = append( + searchPaths, + filepath.Join(binaryDir, baseSearchPath), + ) + } + } + + if !workingDirFirst { + searchPaths = append(searchPaths, baseSearchPaths...) 
+ } + + for _, parent := range searchPaths { + found, err := filepath.Abs(filepath.Join(parent, path)) + if err != nil { + continue + } else if fileInfo, err := os.Stat(found); err == nil { + if filter != nil { + if filter(fileInfo) { + return found + } + } else { + return found + } + } + } + + return "" +} + +func FindPath(path string, baseSearchPaths []string, filter func(os.FileInfo) bool) string { + return findPath(path, baseSearchPaths, true, filter) +} + +// FindFile looks for the given file in nearby ancestors relative to the current working +// directory as well as the directory of the executable. +func FindFile(path string) string { + return FindPath(path, CommonBaseSearchPaths(), func(fileInfo os.FileInfo) bool { + return !fileInfo.IsDir() + }) +} + +// fileutils.FindDir looks for the given directory in nearby ancestors relative to the current working +// directory as well as the directory of the executable, falling back to `./` if not found. +func FindDir(dir string) (string, bool) { + found := FindPath(dir, CommonBaseSearchPaths(), func(fileInfo os.FileInfo) bool { + return fileInfo.IsDir() + }) + if found == "" { + return "./", false + } + + return found, true +} + +// FindDirRelBinary looks for the given directory in nearby ancestors relative to the +// directory of the executable, then relative to the working directory, falling back to `./` if not found. +func FindDirRelBinary(dir string) (string, bool) { + found := findPath(dir, CommonBaseSearchPaths(), false, func(fileInfo os.FileInfo) bool { + return fileInfo.IsDir() + }) + if found == "" { + return "./", false + } + return found, true +} + +// Valid characters are: alphanumeric, dash, underscore +var safeFileNameRegex = regexp.MustCompile(`[^\w\-\_]`) + +// SanitizeFileName takes a string and returns a safe file name without an extension. +func SanitizeFileName(input string) string { + // Trim leading or trailing dots or spaces + safeName := strings.Trim(input, ". 
") + // Replace dots with nothing + safeName = strings.ReplaceAll(safeName, ".", "") + + // Replace all invalid characters with an underscore + safeName = safeFileNameRegex.ReplaceAllString(safeName, "_") + + // Limit length + const maxLength = 100 + if len(safeName) > maxLength { + safeName = safeName[:maxLength] + } + + return safeName +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/utils/json.go b/vendor/github.com/mattermost/mattermost/server/public/utils/json.go new file mode 100644 index 00000000..a2c80974 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/utils/json.go @@ -0,0 +1,98 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package utils + +import ( + "bytes" + "encoding/json" + "strings" + "unicode" + + "github.com/pkg/errors" +) + +type HumanizedJSONError struct { + Err error + Line int + Character int +} + +func (e *HumanizedJSONError) Error() string { + return e.Err.Error() +} + +// HumanizeJSONError extracts error offsets and annotates the error with useful context +func HumanizeJSONError(err error, data []byte) error { + if syntaxError, ok := err.(*json.SyntaxError); ok { + return NewHumanizedJSONError(syntaxError, data, syntaxError.Offset) + } else if unmarshalError, ok := err.(*json.UnmarshalTypeError); ok { + return NewHumanizedJSONError(unmarshalError, data, unmarshalError.Offset) + } + return err +} + +func NewHumanizedJSONError(err error, data []byte, offset int64) *HumanizedJSONError { + if err == nil { + return nil + } + + if offset < 0 || offset > int64(len(data)) { + return &HumanizedJSONError{ + Err: errors.Wrapf(err, "invalid offset %d", offset), + } + } + + lineSep := []byte{'\n'} + + line := bytes.Count(data[:offset], lineSep) + 1 + lastLineOffset := bytes.LastIndex(data[:offset], lineSep) + character := int(offset) - (lastLineOffset + 1) + 1 + + return &HumanizedJSONError{ + Line: line, + Character: character, + Err: 
errors.Wrapf(err, "parsing error at line %d, character %d", line, character), + } +} + +func IsEmptyJSON(j json.RawMessage) bool { + if len(j) == 0 { + return true + } + + // remove all whitespace + jj := make([]byte, 0, len(j)) + for _, b := range j { + if !unicode.IsSpace(rune(b)) { + jj = append(jj, b) + } + } + + if len(jj) == 0 || bytes.Equal(jj, []byte("{}")) || bytes.Equal(jj, []byte("\"\"")) || bytes.Equal(jj, []byte("[]")) { + return true + } + return false +} + +func StringPtrToJSON(ptr *string) json.RawMessage { + if ptr == nil || len(*ptr) == 0 { + return []byte("{}") + } + s := *ptr + + if strings.HasPrefix(s, "{") && strings.HasSuffix(s, "}") { + return []byte(s) + } + + if strings.HasPrefix(s, "[") && strings.HasSuffix(s, "]") { + return []byte(s) + } + + if strings.HasPrefix(s, "\"") && strings.HasSuffix(s, "\"") { + return []byte(s) + } + + // This must be a bare string which will need quotes to make a valid JSON document. + return []byte("\"" + s + "\"") +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/utils/page.go b/vendor/github.com/mattermost/mattermost/server/public/utils/page.go new file mode 100644 index 00000000..c9e00003 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/utils/page.go @@ -0,0 +1,43 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package utils + +// Pager fetches all items from a paginated API. +// Pager is a generic function that fetches and aggregates paginated data. +// It takes a fetch function and a perPage parameter as arguments. +// +// The fetch function is responsible for retrieving a slice of items of type T +// for a given page number. It returns the fetched items and an error, if any. +// Ideally a developer may want to use a closure to create a fetch function. +// +// The perPage parameter specifies the number of items to fetch per page. 
+// +// Example usage: +// +// items, err := Pager(fetchFunc, 10) +// if err != nil { +// // handle error +// } +// // process items +func Pager[T any](fetch func(page int) ([]T, error), perPage int) ([]T, error) { + var list []T + var page int + + for { + fetched, err := fetch(page) + if err != nil { + return list, err + } + + list = append(list, fetched...) + + if len(fetched) < perPage { + break + } + + page++ + } + + return list, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/utils/sql/sql_utils.go b/vendor/github.com/mattermost/mattermost/server/public/utils/sql/sql_utils.go new file mode 100644 index 00000000..bb847517 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/utils/sql/sql_utils.go @@ -0,0 +1,76 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. +// See LICENSE.txt for license information. + +package sql + +import ( + "context" + dbsql "database/sql" + "strings" + "time" + + "github.com/mattermost/mattermost/server/public/model" + "github.com/mattermost/mattermost/server/public/shared/mlog" + "github.com/pkg/errors" +) + +const ( + DBPingTimeout = 10 * time.Second + DBConnRetrySleep = 2 * time.Second + + replicaLagPrefix = "replica-lag" +) + +// SetupConnection sets up the connection to the database and pings it to make sure it's alive. +// It also applies any database configuration settings that are required. +func SetupConnection(logger mlog.LoggerIFace, connType string, dataSource string, settings *model.SqlSettings, attempts int) (*dbsql.DB, error) { + db, err := dbsql.Open(*settings.DriverName, dataSource) + if err != nil { + return nil, errors.Wrap(err, "failed to open SQL connection") + } + + // At this point, we have passed sql.Open, so we deliberately ignore any errors. 
+ sanitized, _ := model.SanitizeDataSource(*settings.DriverName, dataSource) + + logger = logger.With( + mlog.String("database", connType), + mlog.String("dataSource", sanitized), + ) + + for attempt := 1; attempt <= attempts; attempt++ { + if attempt > 1 { + logger.Info("Pinging SQL", mlog.Int("attempt", attempt)) + } + ctx, cancel := context.WithTimeout(context.Background(), DBPingTimeout) + defer cancel() + err = db.PingContext(ctx) + if err != nil { + if attempt == attempts { + return nil, err + } + logger.Error("Failed to ping DB", mlog.Float("retrying in seconds", DBConnRetrySleep.Seconds()), mlog.Err(err)) + time.Sleep(DBConnRetrySleep) + continue + } + break + } + + if strings.HasPrefix(connType, replicaLagPrefix) { + // If this is a replica lag connection, we just open one connection. + // + // Arguably, if the query doesn't require a special credential, it does take up + // one extra connection from the replica DB. But falling back to the replica + // data source when the replica lag data source is null implies an ordering constraint + // which makes things brittle and is not a good design. + // If connections are an overhead, it is advised to use a connection pool. + db.SetMaxOpenConns(1) + db.SetMaxIdleConns(1) + } else { + db.SetMaxIdleConns(*settings.MaxIdleConns) + db.SetMaxOpenConns(*settings.MaxOpenConns) + } + db.SetConnMaxLifetime(time.Duration(*settings.ConnMaxLifetimeMilliseconds) * time.Millisecond) + db.SetConnMaxIdleTime(time.Duration(*settings.ConnMaxIdleTimeMilliseconds) * time.Millisecond) + + return db, nil +} diff --git a/vendor/github.com/mattermost/mattermost/server/public/utils/timeutils/time.go b/vendor/github.com/mattermost/mattermost/server/public/utils/timeutils/time.go new file mode 100644 index 00000000..be677739 --- /dev/null +++ b/vendor/github.com/mattermost/mattermost/server/public/utils/timeutils/time.go @@ -0,0 +1,29 @@ +// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. 
+// See LICENSE.txt for license information. + +package timeutils + +import ( + "time" +) + +const ( + RFC3339Milli = "2006-01-02T15:04:05.999Z07:00" +) + +func FormatMillis(millis int64) string { + return time.UnixMilli(millis).Format(RFC3339Milli) +} + +func ParseFormatedMillis(s string) (millis int64, err error) { + if s == "" { + return 0, nil + } + + t, err := time.Parse(RFC3339Milli, s) + if err != nil { + return 0, err + } + + return t.UnixMilli(), nil +} diff --git a/vendor/github.com/mattermost/morph/.gitignore b/vendor/github.com/mattermost/morph/.gitignore new file mode 100644 index 00000000..706fd07f --- /dev/null +++ b/vendor/github.com/mattermost/morph/.gitignore @@ -0,0 +1,2 @@ +.idea +.vscode diff --git a/vendor/github.com/mattermost/morph/AUTHORS b/vendor/github.com/mattermost/morph/AUTHORS new file mode 100644 index 00000000..49d0cdd0 --- /dev/null +++ b/vendor/github.com/mattermost/morph/AUTHORS @@ -0,0 +1,12 @@ +# This is the official list of go-morph authors for copyright purposes. +# +# This does not necessarily list everyone who has contributed code, since in +# some cases, their employer may be the copyright holder. To see the full list +# of contributors, see the revision history in source control or +# https://github.com/go-morph/morph/graphs/contributors. +# +# Authors who wish to be recognized in this file should add themselves (or +# their employer, as appropriate). + +mgdelacroix +nronas diff --git a/vendor/github.com/mattermost/morph/LICENSE b/vendor/github.com/mattermost/morph/LICENSE new file mode 100644 index 00000000..41a7f1df --- /dev/null +++ b/vendor/github.com/mattermost/morph/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2021 The go-morph AUTHORS. All rights reserved. 
+https://github.com/go-morph/morph + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/mattermost/morph/Makefile b/vendor/github.com/mattermost/morph/Makefile new file mode 100644 index 00000000..293ad2a6 --- /dev/null +++ b/vendor/github.com/mattermost/morph/Makefile @@ -0,0 +1,42 @@ +all: test + +GO=go + +.PHONY: test +test: + $(GO) clean -testcache + make test-drivers + make test-rest + +.PHONY: test-rest +test-rest: + $(GO) clean -testcache + $(GO) test -race -v -tags=!drivers,sources ./... + +.PHONY: test-drivers +test-drivers: + $(GO) clean -testcache + $(GO) test -v -tags=drivers,!sources ./drivers/postgres/ + +.PHONY: update-dependencies +update-dependencies: + $(GO) get -u ./... + $(GO) mod vendor + $(GO) mod tidy + +.PHONY: vendor +vendor: + $(GO) mod vendor + $(GO) mod tidy + +.PHONY: check +check: + $(GO) fmt ./... 
+ +.PHONY: run-databases +run-databases: + docker-compose up --no-recreate -d + +.PHONY: install +install: + $(GO) install -mod=readonly -trimpath ./cmd/morph diff --git a/vendor/github.com/mattermost/morph/README.md b/vendor/github.com/mattermost/morph/README.md new file mode 100644 index 00000000..0acac441 --- /dev/null +++ b/vendor/github.com/mattermost/morph/README.md @@ -0,0 +1,81 @@ +![Morph_Logo](https://user-images.githubusercontent.com/7295363/200433166-7bed1543-e586-42a1-b7d6-8ccae5276c26.png) + +> A database migration tool designed to make schema migrations easy. + +# Morph + +[![GitHub Workflow Status (branch)](https://img.shields.io/github/workflow/status/mattermost/morph/CI)](https://github.com/mattermost/morph/actions/workflows/ci.yml?query=branch%3Amaster) +[![GoDoc](https://pkg.go.dev/badge/github.com/mattermost/migrate)](https://pkg.go.dev/github.com/mattermost/morph) + +As an application evolves, the data flowing inside inevitably evolves along with it. If you have an application that persists the data in a relational database, the way you store the data will probably change over time. Morph is a database migration tool that helps you to apply your migrations. It is written with Go so you can use it from your Go application as well. Read our [blog post](https://mattermost.com/blog/morph-database-schema-migrations-made-easy/) to learn more about the motivation behind this project. + +## Usage + +Morph can be used as a library or a CLI tool. 
+ +### Library + +```Go +import ( + "context" + + "github.com/mattermost/morph" + "github.com/mattermost/morph/drivers/mysql" + "github.com/mattermost/morph/sources/embedded" +) + +src, err := embedded.WithInstance(&embedded.AssetSource{ + Names: []string{}, // add migration file names + AssetFunc: func(name string) ([]byte, error) { + return []byte{}, nil // should return the file contents + }, +}) +if err != nil { + return err +} +defer src.Close() + +driver, err := mysql.WithInstance(db) +if err != nil { + return err +} + +engine, err := morph.New(context.Background(), driver, src) +if err != nil { + return err +} +defer engine.Close() + +engine.ApplyAll() + +``` + +### CLI + +To install `morph` you can use: + +```bash +go install github.com/mattermost/morph/cmd/morph@latest +``` + +Then you can apply your migrations like below: + +```bash +morph apply up --driver postgres --dsn "postgres://user:pass@localhost:5432/mydb?sslmode=disable" --path ./db/migrations/postgres --number 1 +``` + +## Migration Files + +The migrations files should have an `up` and `down` versions. The program requires each migration to be reversible, and the naming of the migration should be in the following form: +``` +0000000001_create_user.up.sql +0000000001_create_user.down.sql +``` + +The first part will be used to determine the order in which the migrations should be applied and the next part until the `up|down.sql` suffix will be the migration name. + +The program requires this naming convention to be followed as it saves the order and names of the migrations. Also, it can rollback migrations with the `down` files. 
+ +## LICENSE + +[MIT](LICENSE) diff --git a/vendor/github.com/mattermost/morph/color_logger.go b/vendor/github.com/mattermost/morph/color_logger.go new file mode 100644 index 00000000..377e684d --- /dev/null +++ b/vendor/github.com/mattermost/morph/color_logger.go @@ -0,0 +1,36 @@ +package morph + +import ( + "log" + + "github.com/fatih/color" +) + +var ( + ErrorLogger = color.New(color.FgRed, color.Bold) + ErrorLoggerLight = color.New(color.FgRed) + InfoLogger = color.New(color.FgCyan, color.Bold) + InfoLoggerLight = color.New(color.FgCyan) + SuccessLogger = color.New(color.FgGreen, color.Bold) +) + +type Logger interface { + Printf(format string, v ...interface{}) + Println(v ...interface{}) +} + +type colorLogger struct { + log *log.Logger +} + +func newColorLogger(log *log.Logger) *colorLogger { + return &colorLogger{log: log} +} + +func (l *colorLogger) Printf(format string, v ...interface{}) { + l.log.Println(InfoLoggerLight.Sprintf(format, v...)) +} + +func (l *colorLogger) Println(v ...interface{}) { + l.log.Println(InfoLoggerLight.Sprint(v...)) +} diff --git a/vendor/github.com/mattermost/morph/docker-compose.yml b/vendor/github.com/mattermost/morph/docker-compose.yml new file mode 100644 index 00000000..948a02c6 --- /dev/null +++ b/vendor/github.com/mattermost/morph/docker-compose.yml @@ -0,0 +1,22 @@ +version: '3.1' +services: + postgres: + image: postgres + restart: always + ports: + - "6432:5432" + environment: + POSTGRES_PASSWORD: morph + POSTGRES_DB: morph_test + POSTGRES_USER: morph + mysql: + image: "mysql:5.7" + restart: always + ports: + - "3307:3306" + command: --default-authentication-plugin=mysql_native_password + environment: + MYSQL_DATABASE: morph_test + MYSQL_USER: morph + MYSQL_PASSWORD: morph + MYSQL_ROOT_PASSWORD: morph \ No newline at end of file diff --git a/vendor/github.com/mattermost/morph/drivers/driver.go b/vendor/github.com/mattermost/morph/drivers/driver.go new file mode 100644 index 00000000..a39cdde1 --- /dev/null +++ 
b/vendor/github.com/mattermost/morph/drivers/driver.go @@ -0,0 +1,28 @@ +package drivers + +import ( + "github.com/mattermost/morph/models" +) + +type Config struct { + MigrationsTable string + // StatementTimeoutInSecs is used to set a timeout for each migration file. + // Set below zero to disable timeout. Zero value will result in default value, which is 60 seconds. + StatementTimeoutInSecs int + MigrationMaxSize int +} + +type Driver interface { + Ping() error + // Close closes the underlying db connection. If the driver is created via Open() function + // this method will also going to call Close() on the sql.db instance. + Close() error + Apply(migration *models.Migration, saveVersion bool) error + AppliedMigrations() ([]*models.Migration, error) + // SetConfig should be used to set the driver configuration. The key is the name of the configuration + // This method should return an error if the key is not supported. + // This method is being used by the morph engine to apply configurations such as: + // StatementTimeoutInSecs + // MigrationsTableName + SetConfig(key string, value interface{}) error +} diff --git a/vendor/github.com/mattermost/morph/drivers/error.go b/vendor/github.com/mattermost/morph/drivers/error.go new file mode 100644 index 00000000..ece09b51 --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/error.go @@ -0,0 +1,25 @@ +package drivers + +import "fmt" + +type AppError struct { + OrigErr error + Driver string + Message string +} + +type DatabaseError struct { + OrigErr error + Driver string + Message string + Command string + Query []byte +} + +func (ae *AppError) Error() string { + return fmt.Sprintf("driver: %s, message: %s, originalError: %v ", ae.Driver, ae.Message, ae.OrigErr) +} + +func (de *DatabaseError) Error() string { + return fmt.Sprintf("driver: %s, message: %s, command: %s, originalError: %v, query: \n\n%s\n", de.Driver, de.Message, de.Command, de.OrigErr, string(de.Query)) +} diff --git 
a/vendor/github.com/mattermost/morph/drivers/lock.go b/vendor/github.com/mattermost/morph/drivers/lock.go new file mode 100644 index 00000000..0a0f3bd8 --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/lock.go @@ -0,0 +1,84 @@ +package drivers + +import ( + "context" + "errors" + "math/rand" + "time" +) + +const ( + // MutexTableName is the name being used for the mutex table + MutexTableName = "db_lock" + + // minWaitInterval is the minimum amount of time to wait between locking attempts + minWaitInterval = 1 * time.Second + + // maxWaitInterval is the maximum amount of time to wait between locking attempts + maxWaitInterval = 5 * time.Minute + + // pollWaitInterval is the usual time to wait between unsuccessful locking attempts + pollWaitInterval = 1 * time.Second + + // jitterWaitInterval is the amount of jitter to add when waiting to avoid thundering herds + jitterWaitInterval = minWaitInterval / 2 + + // TTL is the interval after which a locked mutex will expire unless refreshed + TTL = time.Second * 15 + + // RefreshInterval is the interval on which the mutex will be refreshed when locked + RefreshInterval = TTL / 2 +) + +// MakeLockKey returns the prefixed key used to namespace mutex keys. +func MakeLockKey(key string) (string, error) { + if key == "" { + return "", errors.New("must specify valid mutex key") + } + + return key, nil +} + +// NextWaitInterval determines how long to wait until the next lock retry. +func NextWaitInterval(lastWaitInterval time.Duration, err error) time.Duration { + nextWaitInterval := lastWaitInterval + + if nextWaitInterval <= 0 { + nextWaitInterval = minWaitInterval + } + + if err != nil { + nextWaitInterval *= 2 + if nextWaitInterval > maxWaitInterval { + nextWaitInterval = maxWaitInterval + } + } else { + nextWaitInterval = pollWaitInterval + } + + // Add some jitter to avoid unnecessary collision between competing other instances. 
+ nextWaitInterval -= time.Duration(rand.Int63n(int64(jitterWaitInterval) / 2)) + + return nextWaitInterval +} + +type Locker interface { + // Lock locks m unless the context is canceled. If the mutex is already locked by any other + // instance, including the current one, the calling goroutine blocks until the mutex can be locked, + // or the context is canceled. + // + // The mutex is locked only if a nil error is returned. + Lock(ctx context.Context) error + Unlock() error +} + +type Lockable interface { + DriverName() string +} + +// IsLockable returns whether the given instance satisfies +// drivers.Lockable or not. +func IsLockable(x interface{}) bool { + _, ok := x.(Lockable) + return ok +} diff --git a/vendor/github.com/mattermost/morph/drivers/logger.go b/vendor/github.com/mattermost/morph/drivers/logger.go new file mode 100644 index 00000000..0556632e --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/logger.go @@ -0,0 +1,19 @@ +package drivers + +import "fmt" + +type Logger interface { + Printf(format string, v ...interface{}) + Println(v ...interface{}) +} + +type DefaultLogger struct { +} + +func (DefaultLogger) Printf(format string, v ...interface{}) { + fmt.Printf(format, v...) +} + +func (DefaultLogger) Println(v ...interface{}) { + fmt.Println(v...) +} diff --git a/vendor/github.com/mattermost/morph/drivers/mysql/lock.go b/vendor/github.com/mattermost/morph/drivers/mysql/lock.go new file mode 100644 index 00000000..c080129f --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/mysql/lock.go @@ -0,0 +1,262 @@ +package mysql + +import ( + "context" + "database/sql" + "errors" + "fmt" + "sync" + "time" + + ms "github.com/go-sql-driver/mysql" + "github.com/mattermost/morph/drivers" +) + +// Mutex is similar to sync.Mutex, except usable by morph to lock the db. +// +// Pick a unique name for each mutex your plugin requires. +// +// A Mutex must not be copied after first use. 
+type Mutex struct { + noCopy // nolint:unused + key string + + // lock guards the variables used to manage the refresh task, and is not itself related to + // the db lock. + lock sync.Mutex + stopRefresh chan bool + refreshDone chan bool + conn *sql.Conn + + logger drivers.Logger +} + +// NewMutex creates a mutex with the given key name. +// +// returns error if key is empty. +func NewMutex(key string, driver drivers.Driver, logger drivers.Logger) (*Mutex, error) { + key, err := drivers.MakeLockKey(key) + if err != nil { + return nil, err + } + + ctx, cancel := context.WithTimeout(context.Background(), drivers.TTL) + defer cancel() + + ms, ok := driver.(*mysql) + if !ok { + return nil, errors.New("incorrect implementation of the driver") + } + + conn, err := ms.db.Conn(context.Background()) + if err != nil { + return nil, err + } + + createTableIfNotExistsQuery := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (Id varchar(64) NOT NULL, ExpireAt bigint(20) NOT NULL, PRIMARY KEY (Id)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4", drivers.MutexTableName) + if _, err = conn.ExecContext(ctx, createTableIfNotExistsQuery); err != nil { + return nil, err + } + + return &Mutex{ + key: key, + conn: conn, + logger: logger, + }, nil +} + +// lock makes a single attempt to lock the mutex, returning true only if successful. 
+func (m *Mutex) tryLock(ctx context.Context) (bool, error) { + now := time.Now() + tx, err := m.conn.BeginTx(ctx, nil) + if err != nil { + return false, err + } + defer m.finalizeTx(tx) + + query := fmt.Sprintf("INSERT INTO %s (Id, ExpireAt) VALUES (?, ?)", drivers.MutexTableName) + if _, err := tx.Exec(query, m.key, now.Add(drivers.TTL).Unix()); err != nil { + if mysqlErr, ok := err.(*ms.MySQLError); ok && mysqlErr.Number == 1062 { + m.logger.Println("DB is locked, going to try acquire the lock if it is expired.") + } + m.finalizeTx(tx) + + err2 := m.releaseLock(ctx, now) + if err2 == nil { // lock has been released due to expiration + return true, nil + } + + return false, fmt.Errorf("failed to lock mutex: %w", err) + } + + err = tx.Commit() + if err != nil { + return false, err + } + + return true, nil +} + +func (m *Mutex) releaseLock(ctx context.Context, t time.Time) error { + tx, err := m.conn.BeginTx(ctx, nil) + if err != nil { + return err + } + defer m.finalizeTx(tx) + + e, err := m.getExpireAt(tx) + if err != nil { + return err + } + + if t.Unix() < e { + return errors.New("could not release the lock") + } + + query := fmt.Sprintf("UPDATE %s SET ExpireAt = ? WHERE Id = ?", drivers.MutexTableName) + if err = executeTx(tx, query, t.Add(drivers.TTL).Unix(), m.key); err != nil { + return err + } + + err = tx.Commit() + if err != nil { + return fmt.Errorf("unable to set new expireat for mutex: %w", err) + } + + return nil +} + +func (m *Mutex) getExpireAt(tx *sql.Tx) (int64, error) { + var expireAt int64 + query := fmt.Sprintf("SELECT ExpireAt FROM %s WHERE Id = ?", drivers.MutexTableName) + err := tx.QueryRow(query, m.key).Scan(&expireAt) + if err != nil { + return -1, fmt.Errorf("failed to fetch mutex from db: %w", err) + } + + return expireAt, nil +} + +// refreshLock rewrites the lock key value with a new expiry, returning nil only if successful. 
+func (m *Mutex) refreshLock(ctx context.Context) error { + tx, err := m.conn.BeginTx(ctx, nil) + if err != nil { + return err + } + defer m.finalizeTx(tx) + + e, err := m.getExpireAt(tx) + if err != nil { + return err + } + + tmp := time.Unix(e, 0) + query := fmt.Sprintf("UPDATE %s SET ExpireAt = ? WHERE Id = ?", drivers.MutexTableName) + if err = executeTx(tx, query, tmp.Add(drivers.TTL).Unix(), m.key); err != nil { + return err + } + + err = tx.Commit() + if err != nil { + return fmt.Errorf("unable to refresh expireat for mutex: %w", err) + } + + return nil +} + +// Lock locks m unless the context is canceled. If the mutex is already locked by any other +// instance, including the current one, the calling goroutine blocks until the mutex can be locked, +// or the context is canceled. +// +// The mutex is locked only if a nil error is returned. +func (m *Mutex) Lock(ctx context.Context) error { + var waitInterval time.Duration + + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(waitInterval): + } + + ok, err := m.tryLock(ctx) + if err != nil || !ok { + waitInterval = drivers.NextWaitInterval(waitInterval, err) + continue + } + + break + } + + stop := make(chan bool) + done := make(chan bool) + go func() { + defer close(done) + t := time.NewTicker(drivers.RefreshInterval) + for { + select { + case <-t.C: + err := m.refreshLock(ctx) + if err != nil { + return + } + case <-stop: + return + } + } + }() + + m.lock.Lock() + m.stopRefresh = stop + m.refreshDone = done + m.lock.Unlock() + + return nil +} + +// Unlock unlocks m. It is a run-time error if m is not locked on entry to Unlock. +// +// Just like sync.Mutex, a locked Lock is not associated with a particular goroutine or a process. 
+func (m *Mutex) Unlock() error { + m.lock.Lock() + if m.stopRefresh == nil { + m.lock.Unlock() + panic("mutex has not been acquired") + } + + close(m.stopRefresh) + m.stopRefresh = nil + <-m.refreshDone + m.lock.Unlock() + + defer m.conn.Close() + + // If an error occurs deleting, the mutex will still expire, allowing later retry. + query := fmt.Sprintf("DELETE FROM %s WHERE Id = ?", drivers.MutexTableName) + _, err := m.conn.ExecContext(context.Background(), query, m.key) + return err +} + +func executeTx(tx *sql.Tx, query string, args ...interface{}) error { + if _, err := tx.Exec(query, args...); err != nil { + return err + } + + return nil +} + +func (m *Mutex) finalizeTx(tx *sql.Tx) { + if err := tx.Rollback(); err != nil && err != sql.ErrTxDone { + m.logger.Printf("failed to rollback transaction: %s", err) + } +} + +// noCopy may be embedded into structs which must not be copied +// after the first use. +// +// See https://golang.org/issues/8005#issuecomment-190753527 +// for details. +type noCopy struct{} // nolint:unused + +// Lock is a no-op used by -copylocks checker from `go vet`. +func (*noCopy) Lock() {} // nolint:unused diff --git a/vendor/github.com/mattermost/morph/drivers/mysql/mysql.go b/vendor/github.com/mattermost/morph/drivers/mysql/mysql.go new file mode 100644 index 00000000..11ac3db0 --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/mysql/mysql.go @@ -0,0 +1,332 @@ +// Initial code generated by generator. 
+package mysql + +import ( + "context" + "database/sql" + "fmt" + "strconv" + + "github.com/pkg/errors" + + _ "github.com/go-sql-driver/mysql" + "github.com/mattermost/morph/drivers" + "github.com/mattermost/morph/models" +) + +const driverName = "mysql" +const defaultMigrationMaxSize = 10 * 1 << 20 // 10 MB + +// add here any custom driver configuration +var configParams = []string{ + "x-migration-max-size", + "x-migrations-table", + "x-statement-timeout", +} + +type driverConfig struct { + drivers.Config + databaseName string + closeDBonClose bool +} + +type mysql struct { + conn *sql.Conn + db *sql.DB + config *driverConfig +} + +func WithInstance(dbInstance *sql.DB) (drivers.Driver, error) { + driverConfig := getDefaultConfig() + + conn, err := dbInstance.Conn(context.Background()) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "grabbing_connection", OrigErr: err, Message: "failed to grab connection to the database"} + } + + if driverConfig.databaseName, err = currentDatabaseNameFromDB(conn, driverConfig); err != nil { + return nil, err + } + + return &mysql{config: driverConfig, conn: conn, db: dbInstance}, nil +} + +func Open(connURL string) (drivers.Driver, error) { + customParams, err := drivers.ExtractCustomParams(connURL, configParams) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to parse custom parameters from url"} + } + + sanitizedConnURL, err := drivers.RemoveParamsFromURL(connURL, configParams) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to sanitize url from custom parameters"} + } + + driverConfig, err := mergeConfigWithParams(customParams, getDefaultConfig()) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to merge custom params to driver config"} + } + + db, err := sql.Open(driverName, sanitizedConnURL) + if err != nil { + return nil, 
&drivers.DatabaseError{Driver: driverName, Command: "opening_connection", OrigErr: err, Message: "failed to open connection with the database"} + } + + conn, err := db.Conn(context.Background()) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "grabbing_connection", OrigErr: err, Message: "failed to grab connection to the database"} + } + + if driverConfig.databaseName, err = extractDatabaseNameFromURL(sanitizedConnURL); err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to extract database name from connection url"} + } + + driverConfig.closeDBonClose = true + + return &mysql{ + conn: conn, + db: db, + config: driverConfig, + }, nil +} + +func (driver *mysql) Ping() error { + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + + return driver.conn.PingContext(ctx) +} + +func (mysql) DriverName() string { + return driverName +} + +func (driver *mysql) Close() error { + if driver.conn != nil { + if err := driver.conn.Close(); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to close database connection", + Command: "mysql_conn_close", + Query: nil, + } + } + } + + if driver.db != nil && driver.config.closeDBonClose { + if err := driver.db.Close(); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to close database", + Command: "mysql_db_close", + Query: nil, + } + } + driver.db = nil + } + + driver.conn = nil + return nil +} + +func (driver *mysql) createSchemaTableIfNotExists() (err error) { + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + + createTableIfNotExistsQuery := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (Version bigint(20) NOT NULL, Name varchar(64) NOT NULL, PRIMARY KEY (Version)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4", driver.config.MigrationsTable) + if _, err = 
driver.conn.ExecContext(ctx, createTableIfNotExistsQuery); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed while executing query", + Command: "create_migrations_table_if_not_exists", + Query: []byte(createTableIfNotExistsQuery), + } + } + + return nil +} + +func (driver *mysql) Apply(migration *models.Migration, saveVersion bool) (err error) { + query := migration.Query() + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + + if _, err := driver.conn.ExecContext(ctx, query); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed when applying migration", + Command: "apply_migration", + Query: []byte(query), + } + } + + updateVersionContext, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + + if !saveVersion { + return nil + } + + updateVersionQuery := driver.addMigrationQuery(migration) + res, err := driver.conn.ExecContext(updateVersionContext, updateVersionQuery) + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed when updating migrations table with the new version", + Command: "update_version", + Query: []byte(updateVersionQuery), + } + } + + affected, err := res.RowsAffected() + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed when reading the result for migrations table with the new version", + Command: "update_version", + Query: []byte(updateVersionQuery), + } + } + if affected == 0 { + return &drivers.DatabaseError{ + OrigErr: sql.ErrNoRows, + Driver: driverName, + Message: "could not update version, probably a version mismatch", + Command: "update_version", + Query: []byte(updateVersionQuery), + } + } + + return nil +} + +func (driver *mysql) AppliedMigrations() (migrations []*models.Migration, err error) { + if driver.conn == nil { + return nil, &drivers.AppError{ + 
OrigErr: errors.New("driver has no connection established"), + Message: "database connection is missing", + Driver: driverName, + } + } + + if err := driver.createSchemaTableIfNotExists(); err != nil { + return nil, err + } + + query := fmt.Sprintf("SELECT version, name FROM %s", driver.config.MigrationsTable) + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + var appliedMigrations []*models.Migration + var version uint32 + var name string + + rows, err := driver.conn.QueryContext(ctx, query) + if err != nil { + return nil, &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to fetch applied migrations", + Command: "select_applied_migrations", + Query: []byte(query), + } + } + defer rows.Close() + + for rows.Next() { + if err := rows.Scan(&version, &name); err != nil { + return nil, &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to scan applied migration row", + Command: "scan_applied_migrations", + } + } + + appliedMigrations = append(appliedMigrations, &models.Migration{ + Name: name, + Version: version, + Direction: models.Up, + }) + } + + return appliedMigrations, nil +} + +func currentDatabaseNameFromDB(conn *sql.Conn, config *driverConfig) (string, error) { + query := "SELECT DATABASE()" + + ctx, cancel := drivers.GetContext(config.StatementTimeoutInSecs) + defer cancel() + + var databaseName string + if err := conn.QueryRowContext(ctx, query).Scan(&databaseName); err != nil { + return "", &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to fetch database name", + Command: "current_database", + Query: []byte(query), + } + } + + return databaseName, nil +} + +func mergeConfigWithParams(params map[string]string, config *driverConfig) (*driverConfig, error) { + var err error + + for _, configKey := range configParams { + if v, ok := params[configKey]; ok { + switch configKey { + case "x-migration-max-size": + if 
config.MigrationMaxSize, err = strconv.Atoi(v); err != nil { + return nil, errors.New(fmt.Sprintf("failed to cast config param %s of %s", configKey, v)) + } + case "x-migrations-table": + config.MigrationsTable = v + case "x-statement-timeout": + if config.StatementTimeoutInSecs, err = strconv.Atoi(v); err != nil { + return nil, errors.New(fmt.Sprintf("failed to cast config param %s of %s", configKey, v)) + } + } + } + } + + return config, nil +} + +func (driver *mysql) addMigrationQuery(migration *models.Migration) string { + if migration.Direction == models.Down { + return fmt.Sprintf("DELETE FROM %s WHERE (Version=%d AND NAME='%s')", driver.config.MigrationsTable, migration.Version, migration.Name) + } + return fmt.Sprintf("INSERT INTO %s (Version, Name) VALUES (%d, '%s')", driver.config.MigrationsTable, migration.Version, migration.Name) +} + +func (driver *mysql) SetConfig(key string, value interface{}) error { + if driver.config != nil { + switch key { + case "StatementTimeoutInSecs": + n, ok := value.(int) + if ok { + driver.config.StatementTimeoutInSecs = n + return nil + } + return fmt.Errorf("incorrect value type for %s", key) + case "MigrationsTable": + n, ok := value.(string) + if ok { + driver.config.MigrationsTable = n + return nil + } + return fmt.Errorf("incorrect value type for %s", key) + } + } + + return fmt.Errorf("incorrect key name %q", key) +} diff --git a/vendor/github.com/mattermost/morph/drivers/mysql/utils.go b/vendor/github.com/mattermost/morph/drivers/mysql/utils.go new file mode 100644 index 00000000..dbd74f4a --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/mysql/utils.go @@ -0,0 +1,34 @@ +package mysql + +import ( + mysqlDriver "github.com/go-sql-driver/mysql" + "github.com/mattermost/morph/drivers" +) + +func ExtractMysqlDSNParams(conn string) (map[string]string, error) { + cfg, err := mysqlDriver.ParseDSN(conn) + if err != nil { + return nil, err + } + + return cfg.Params, nil +} + +func 
extractDatabaseNameFromURL(conn string) (string, error) { + cfg, err := mysqlDriver.ParseDSN(conn) + if err != nil { + return "", err + } + + return cfg.DBName, nil +} + +func getDefaultConfig() *driverConfig { + return &driverConfig{ + Config: drivers.Config{ + MigrationsTable: "db_migrations", + StatementTimeoutInSecs: 300, + MigrationMaxSize: defaultMigrationMaxSize, + }, + } +} diff --git a/vendor/github.com/mattermost/morph/drivers/postgres/lock.go b/vendor/github.com/mattermost/morph/drivers/postgres/lock.go new file mode 100644 index 00000000..9fa1c334 --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/postgres/lock.go @@ -0,0 +1,262 @@ +package postgres + +import ( + "context" + "database/sql" + "errors" + "fmt" + "sync" + "time" + + "github.com/lib/pq" + "github.com/mattermost/morph/drivers" +) + +// Mutex is similar to sync.Mutex, except usable by morph to lock the db. +// +// Pick a unique name for each mutex your plugin requires. +// +// A Mutex must not be copied after first use. +type Mutex struct { + noCopy // nolint:unused + key string + + // lock guards the variables used to manage the refresh task, and is not itself related to + // the db lock. + lock sync.Mutex + stopRefresh chan bool + refreshDone chan bool + conn *sql.Conn + + logger drivers.Logger +} + +// NewMutex creates a mutex with the given key name. +// +// returns error if key is empty. 
+func NewMutex(key string, driver drivers.Driver, logger drivers.Logger) (*Mutex, error) { + key, err := drivers.MakeLockKey(key) + if err != nil { + return nil, err + } + + ctx, cancel := context.WithTimeout(context.Background(), drivers.TTL) + defer cancel() + + ps, ok := driver.(*postgres) + if !ok { + return nil, errors.New("incorrect implementation of the driver") + } + + conn, err := ps.db.Conn(context.Background()) + if err != nil { + return nil, err + } + + createTableIfNotExistsQuery := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (id varchar(64) PRIMARY KEY, expireat bigint);", drivers.MutexTableName) + if _, err = conn.ExecContext(ctx, createTableIfNotExistsQuery); err != nil { + return nil, err + } + + return &Mutex{ + key: key, + conn: conn, + logger: logger, + }, nil +} + +// lock makes a single attempt to lock the mutex, returning true only if successful. +func (m *Mutex) tryLock(ctx context.Context) (bool, error) { + now := time.Now() + tx, err := m.conn.BeginTx(ctx, nil) + if err != nil { + return false, err + } + defer m.finalizeTx(tx) + + query := fmt.Sprintf("INSERT INTO %s (id, expireat) VALUES ($1, $2)", drivers.MutexTableName) + if _, err := tx.Exec(query, m.key, now.Add(drivers.TTL).Unix()); err != nil { + if pqErr, ok := err.(*pq.Error); ok && pqErr.Code == "23505" { + m.logger.Println("DB is locked, going to try acquire the lock if it is expired.") + } + m.finalizeTx(tx) + + err2 := m.releaseLock(ctx, now) + if err2 == nil { // lock has been released due to expiration + return true, nil + } + + return false, fmt.Errorf("failed to lock mutex: %w", err) + } + + err = tx.Commit() + if err != nil { + return false, err + } + + return true, nil +} + +func (m *Mutex) releaseLock(ctx context.Context, t time.Time) error { + tx, err := m.conn.BeginTx(ctx, nil) + if err != nil { + return err + } + defer m.finalizeTx(tx) + + e, err := m.getExpireAt(tx) + if err != nil { + return err + } + + if t.Unix() < e { + return errors.New("could not release the 
lock") + } + + query := fmt.Sprintf("UPDATE %s SET expireat = $1 WHERE id = $2", drivers.MutexTableName) + if err = executeTx(tx, query, t.Add(drivers.TTL).Unix(), m.key); err != nil { + return err + } + + err = tx.Commit() + if err != nil { + return fmt.Errorf("unable to set new expireat for mutex: %w", err) + } + + return nil +} + +func (m *Mutex) getExpireAt(tx *sql.Tx) (int64, error) { + var expireAt int64 + query := fmt.Sprintf("SELECT expireat FROM %s WHERE id = $1", drivers.MutexTableName) + err := tx.QueryRow(query, m.key).Scan(&expireAt) + if err != nil { + return -1, fmt.Errorf("failed to fetch mutex from db: %w", err) + } + + return expireAt, nil +} + +// refreshLock rewrites the lock key value with a new expiry, returning nil only if successful. +func (m *Mutex) refreshLock(ctx context.Context) error { + tx, err := m.conn.BeginTx(ctx, nil) + if err != nil { + return err + } + defer m.finalizeTx(tx) + + e, err := m.getExpireAt(tx) + if err != nil { + return err + } + + tmp := time.Unix(e, 0) + query := fmt.Sprintf("UPDATE %s SET expireat = $1 WHERE id = $2", drivers.MutexTableName) + if err = executeTx(tx, query, tmp.Add(drivers.TTL).Unix(), m.key); err != nil { + return err + } + + err = tx.Commit() + if err != nil { + return fmt.Errorf("unable to refresh expireat for mutex: %w", err) + } + + return nil +} + +// Lock locks m unless the context is canceled. If the mutex is already locked by any other +// instance, including the current one, the calling goroutine blocks until the mutex can be locked, +// or the context is canceled. +// +// The mutex is locked only if a nil error is returned. 
+func (m *Mutex) Lock(ctx context.Context) error { + var waitInterval time.Duration + + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(waitInterval): + } + + ok, err := m.tryLock(ctx) + if err != nil || !ok { + waitInterval = drivers.NextWaitInterval(waitInterval, err) + continue + } + + break + } + + stop := make(chan bool) + done := make(chan bool) + go func() { + defer close(done) + t := time.NewTicker(drivers.RefreshInterval) + for { + select { + case <-t.C: + err := m.refreshLock(ctx) + if err != nil { + return + } + case <-stop: + return + } + } + }() + + m.lock.Lock() + m.stopRefresh = stop + m.refreshDone = done + m.lock.Unlock() + + return nil +} + +// Unlock unlocks m. It is a run-time error if m is not locked on entry to Unlock. +// +// Just like sync.Mutex, a locked Lock is not associated with a particular goroutine or a process. +func (m *Mutex) Unlock() error { + m.lock.Lock() + if m.stopRefresh == nil { + m.lock.Unlock() + panic("mutex has not been acquired") + } + + close(m.stopRefresh) + m.stopRefresh = nil + <-m.refreshDone + m.lock.Unlock() + + defer m.conn.Close() + + // If an error occurs deleting, the mutex will still expire, allowing later retry. + query := fmt.Sprintf("DELETE FROM %s WHERE id = $1", drivers.MutexTableName) + _, err := m.conn.ExecContext(context.Background(), query, m.key) + return err +} + +func executeTx(tx *sql.Tx, query string, args ...interface{}) error { + if _, err := tx.Exec(query, args...); err != nil { + return err + } + + return nil +} + +func (m *Mutex) finalizeTx(tx *sql.Tx) { + if err := tx.Rollback(); err != nil && err != sql.ErrTxDone { + m.logger.Printf("failed to rollback transaction: %s", err) + } +} + +// noCopy may be embedded into structs which must not be copied +// after the first use. +// +// See https://golang.org/issues/8005#issuecomment-190753527 +// for details. +type noCopy struct{} // nolint:unused + +// Lock is a no-op used by -copylocks checker from `go vet`. 
+func (*noCopy) Lock() {} // nolint:unused diff --git a/vendor/github.com/mattermost/morph/drivers/postgres/postgres.go b/vendor/github.com/mattermost/morph/drivers/postgres/postgres.go new file mode 100644 index 00000000..761595b6 --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/postgres/postgres.go @@ -0,0 +1,401 @@ +package postgres + +import ( + "context" + "database/sql" + "fmt" + "strconv" + "strings" + + "github.com/pkg/errors" + + _ "github.com/lib/pq" + "github.com/mattermost/morph/drivers" + "github.com/mattermost/morph/models" +) + +var ( + driverName = "postgres" + defaultMigrationMaxSize = 10 * 1 << 20 // 10 MB + configParams = []string{ + "x-migration-max-size", + "x-migrations-table", + "x-statement-timeout", + } +) + +// The format is morph: followed by a comma separated list of values. +// For now, we are taking the whole string in a single constant. +// Later, if we need more values, we can split "morph:" to a separate constant. +const nonTransactionalPrefix = "morph:nontransactional" + +type driverConfig struct { + drivers.Config + databaseName string + schemaName string + closeDBonClose bool +} + +type postgres struct { + conn *sql.Conn + db *sql.DB + config *driverConfig +} + +func WithInstance(dbInstance *sql.DB) (drivers.Driver, error) { + conn, err := dbInstance.Conn(context.Background()) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "grabbing_connection", OrigErr: err, Message: "failed to grab connection to the database"} + } + + driverConfig := getDefaultConfig() + if driverConfig.databaseName, err = currentDatabaseNameFromDB(conn, driverConfig); err != nil { + return nil, err + } + + if driverConfig.schemaName, err = currentSchema(conn, driverConfig); err != nil { + return nil, err + } + + return &postgres{ + conn: conn, + db: dbInstance, + config: driverConfig, + }, nil +} + +func Open(connURL string) (drivers.Driver, error) { + customParams, err := drivers.ExtractCustomParams(connURL, 
configParams) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to parse custom parameters from url"} + } + + sanitizedConnURL, err := drivers.RemoveParamsFromURL(connURL, configParams) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to sanitize url from custom parameters"} + } + + driverConfig, err := mergeConfigWithParams(customParams, getDefaultConfig()) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to merge custom params to driver config"} + } + + db, err := sql.Open(driverName, sanitizedConnURL) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "opening_connection", OrigErr: err, Message: "failed to open connection with the database"} + } + + conn, err := db.Conn(context.Background()) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "grabbing_connection", OrigErr: err, Message: "failed to grab connection to the database"} + } + + if driverConfig.databaseName, err = extractDatabaseNameFromURL(connURL); err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to extract database name from connection url"} + } + + if driverConfig.schemaName, err = currentSchema(conn, driverConfig); err != nil { + return nil, err + } + + driverConfig.closeDBonClose = true + + return &postgres{ + db: db, + config: driverConfig, + conn: conn, + }, nil +} + +func currentSchema(conn *sql.Conn, config *driverConfig) (string, error) { + query := "SELECT CURRENT_SCHEMA()" + + ctx, cancel := drivers.GetContext(config.StatementTimeoutInSecs) + defer cancel() + + var schemaName string + if err := conn.QueryRowContext(ctx, query).Scan(&schemaName); err != nil { + return "", &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to fetch current schema", + Command: "current_schema", + Query: 
[]byte(query), + } + } + return schemaName, nil +} + +func mergeConfigWithParams(params map[string]string, config *driverConfig) (*driverConfig, error) { + var err error + + for _, configKey := range configParams { + if v, ok := params[configKey]; ok { + switch configKey { + case "x-migration-max-size": + if config.MigrationMaxSize, err = strconv.Atoi(v); err != nil { + return nil, errors.New(fmt.Sprintf("failed to cast config param %s of %s", configKey, v)) + } + case "x-migrations-table": + config.MigrationsTable = v + case "x-statement-timeout": + if config.StatementTimeoutInSecs, err = strconv.Atoi(v); err != nil { + return nil, errors.New(fmt.Sprintf("failed to cast config param %s of %s", configKey, v)) + } + } + } + } + + return config, nil +} + +func (pg *postgres) Ping() error { + ctx, cancel := drivers.GetContext(pg.config.StatementTimeoutInSecs) + defer cancel() + + return pg.conn.PingContext(ctx) +} + +func (pg *postgres) createSchemaTableIfNotExists() (err error) { + ctx, cancel := drivers.GetContext(pg.config.StatementTimeoutInSecs) + defer cancel() + + createTableIfNotExistsQuery := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (version bigint not null primary key, name varchar not null)", pg.config.MigrationsTable) + if _, err = pg.conn.ExecContext(ctx, createTableIfNotExistsQuery); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed while executing query", + Command: "create_migrations_table_if_not_exists", + Query: []byte(createTableIfNotExistsQuery), + } + } + + return nil +} + +func (postgres) DriverName() string { + return driverName +} + +func (pg *postgres) Close() error { + if pg.conn != nil { + if err := pg.conn.Close(); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to close database connection", + Command: "pg_conn_close", + Query: nil, + } + } + } + + if pg.db != nil && pg.config.closeDBonClose { + if err := pg.db.Close(); err != nil { 
+ return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to close database", + Command: "pg_db_close", + Query: nil, + } + } + pg.db = nil + } + + pg.conn = nil + return nil +} + +func (pg *postgres) Apply(migration *models.Migration, saveVersion bool) (err error) { + query := migration.Query() + + ctx, cancel := drivers.GetContext(pg.config.StatementTimeoutInSecs) + defer cancel() + + nonTransactional := strings.HasPrefix(query, "-- "+nonTransactionalPrefix) + // We wrap with a transaction only when there is no non-transactional prefix. + if !nonTransactional { + transaction, err := pg.conn.BeginTx(ctx, nil) + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "error while opening a transaction to the database", + Command: "begin_transaction", + } + } + + if err = executeQuery(ctx, transaction, query); err != nil { + return err + } + + if saveVersion { + if err = executeQuery(ctx, transaction, pg.addMigrationQuery(migration)); err != nil { + return err + } + } + + err = transaction.Commit() + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "error while committing a transaction to the database", + Command: "commit_transaction", + } + } + } else { + _, err := pg.conn.ExecContext(ctx, query) + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to execute migration", + Command: "executing_query", + Query: []byte(query), + } + } + + if saveVersion { + _, err = pg.conn.ExecContext(ctx, pg.addMigrationQuery(migration)) + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to save version", + Command: "executing_query", + Query: []byte(query), + } + } + } + } + + return nil +} + +func (pg *postgres) AppliedMigrations() (migrations []*models.Migration, err error) { + if pg.conn == nil { + return nil, &drivers.AppError{ + OrigErr: 
errors.New("driver has no connection established"), + Message: "database connection is missing", + Driver: driverName, + } + } + + if err := pg.createSchemaTableIfNotExists(); err != nil { + return nil, err + } + + query := fmt.Sprintf("SELECT version, name FROM %s", pg.config.MigrationsTable) + ctx, cancel := drivers.GetContext(pg.config.StatementTimeoutInSecs) + defer cancel() + var appliedMigrations []*models.Migration + var version uint32 + var name string + + rows, err := pg.conn.QueryContext(ctx, query) + if err != nil { + return nil, &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to fetch applied migrations", + Command: "select_applied_migrations", + Query: []byte(query), + } + } + defer rows.Close() + + for rows.Next() { + if err := rows.Scan(&version, &name); err != nil { + return nil, &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to scan applied migration row", + Command: "scan_applied_migrations", + } + } + + appliedMigrations = append(appliedMigrations, &models.Migration{ + Name: name, + Version: version, + Direction: models.Up, + }) + } + + return appliedMigrations, nil +} + +func (pg *postgres) addMigrationQuery(migration *models.Migration) string { + if migration.Direction == models.Down { + return fmt.Sprintf("DELETE FROM %s WHERE (Version=%d AND NAME='%s')", pg.config.MigrationsTable, migration.Version, migration.Name) + } + return fmt.Sprintf("INSERT INTO %s (version, name) VALUES (%d, '%s')", pg.config.MigrationsTable, migration.Version, migration.Name) +} + +func executeQuery(ctx context.Context, transaction *sql.Tx, query string) error { + if _, err := transaction.ExecContext(ctx, query); err != nil { + if txErr := transaction.Rollback(); txErr != nil { + err = errors.Wrap(errors.New(err.Error()+txErr.Error()), "failed to execute query in migration transaction") + + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Command: "rollback_transaction", + } + } 
+ + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to execute migration", + Command: "executing_query", + Query: []byte(query), + } + } + + return nil +} + +func currentDatabaseNameFromDB(conn *sql.Conn, config *driverConfig) (string, error) { + query := "SELECT CURRENT_DATABASE()" + + ctx, cancel := drivers.GetContext(config.StatementTimeoutInSecs) + defer cancel() + + var databaseName string + if err := conn.QueryRowContext(ctx, query).Scan(&databaseName); err != nil { + return "", &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to fetch database name", + Command: "current_database", + Query: []byte(query), + } + } + return databaseName, nil +} + +func (pg *postgres) SetConfig(key string, value interface{}) error { + if pg.config != nil { + switch key { + case "StatementTimeoutInSecs": + n, ok := value.(int) + if ok { + pg.config.StatementTimeoutInSecs = n + return nil + } + return fmt.Errorf("incorrect value type for %s", key) + case "MigrationsTable": + n, ok := value.(string) + if ok { + pg.config.MigrationsTable = n + return nil + } + return fmt.Errorf("incorrect value type for %s", key) + } + } + + return fmt.Errorf("incorrect key name %q", key) +} diff --git a/vendor/github.com/mattermost/morph/drivers/postgres/utils.go b/vendor/github.com/mattermost/morph/drivers/postgres/utils.go new file mode 100644 index 00000000..6b55e31d --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/postgres/utils.go @@ -0,0 +1,26 @@ +package postgres + +import ( + "net/url" + + "github.com/mattermost/morph/drivers" +) + +func extractDatabaseNameFromURL(URL string) (string, error) { + uri, err := url.Parse(URL) + if err != nil { + return "", err + } + + return uri.Path[1:], nil +} + +func getDefaultConfig() *driverConfig { + return &driverConfig{ + Config: drivers.Config{ + MigrationsTable: "db_migrations", + StatementTimeoutInSecs: 300, + MigrationMaxSize: defaultMigrationMaxSize, + }, + } +} 
diff --git a/vendor/github.com/mattermost/morph/drivers/sqlite/sqlite.go b/vendor/github.com/mattermost/morph/drivers/sqlite/sqlite.go new file mode 100644 index 00000000..071a985d --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/sqlite/sqlite.go @@ -0,0 +1,351 @@ +package sqlite + +import ( + "context" + "database/sql" + "fmt" + "os" + "strconv" + "strings" + "sync/atomic" + + "github.com/pkg/errors" + + "github.com/mattermost/morph/drivers" + "github.com/mattermost/morph/models" + _ "modernc.org/sqlite" +) + +const driverName = "sqlite" +const defaultMigrationMaxSize = 10 * 1 << 20 // 10 MB + +// add here any custom driver configuration +var configParams = []string{ + "x-migration-max-size", + "x-migrations-table", + "x-statement-timeout", +} + +type driverConfig struct { + drivers.Config + closeDBonClose bool +} + +type sqlite struct { + conn *sql.Conn + db *sql.DB + config *driverConfig + + lockedFlag int32 // indicates that the driver is locked or not +} + +func WithInstance(dbInstance *sql.DB) (drivers.Driver, error) { + conn, err := dbInstance.Conn(context.Background()) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "grabbing_connection", OrigErr: err, Message: "failed to grab connection to the database"} + } + + return &sqlite{config: getDefaultConfig(), conn: conn, db: dbInstance}, nil +} + +func Open(filePath string) (drivers.Driver, error) { + customParams, err := drivers.ExtractCustomParams(filePath, configParams) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to parse custom parameters from url"} + } + + sanitizedConnURL, err := drivers.RemoveParamsFromURL(filePath, configParams) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to sanitize url from custom parameters"} + } + + sanitizedConnURL = strings.TrimSuffix(sanitizedConnURL, "?") + + driverConfig, err := mergeConfigWithParams(customParams, 
getDefaultConfig()) + if err != nil { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to merge custom params to driver config"} + } + + if _, err := os.Stat(sanitizedConnURL); errors.Is(err, os.ErrNotExist) { + return nil, &drivers.AppError{Driver: driverName, OrigErr: err, Message: "failed to open db file"} + } + + db, err := sql.Open(driverName, sanitizedConnURL) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "opening_connection", OrigErr: err, Message: "failed to open connection with the database"} + } + + conn, err := db.Conn(context.Background()) + if err != nil { + return nil, &drivers.DatabaseError{Driver: driverName, Command: "grabbing_connection", OrigErr: err, Message: "failed to grab connection to the database"} + } + + driverConfig.closeDBonClose = true + + return &sqlite{ + conn: conn, + db: db, + config: driverConfig, + }, nil +} + +func (driver *sqlite) Ping() error { + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + + return driver.conn.PingContext(ctx) +} + +func (sqlite) DriverName() string { + return driverName +} + +func (driver *sqlite) Close() error { + if driver.conn != nil { + if err := driver.conn.Close(); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to close database connection", + Command: "sqlite_conn_close", + Query: nil, + } + } + } + + if driver.db != nil && driver.config.closeDBonClose { + if err := driver.db.Close(); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to close database", + Command: "sqlite_db_close", + Query: nil, + } + } + driver.db = nil + } + + driver.conn = nil + return nil +} + +func (driver *sqlite) lock() error { + if !atomic.CompareAndSwapInt32(&driver.lockedFlag, 0, 1) { + return &drivers.DatabaseError{ + OrigErr: errors.New("already locked"), + Driver: driverName, + Message: "failed to 
obtain lock", + Command: "lock_driver", + } + } + + return nil +} + +func (driver *sqlite) unlock() error { + atomic.StoreInt32(&driver.lockedFlag, 0) + + return nil +} + +func (driver *sqlite) createSchemaTableIfNotExists() (err error) { + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + + createTableIfNotExistsQuery := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (Version bigint not null primary key, Name varchar not null)", driver.config.MigrationsTable) + if _, err = driver.conn.ExecContext(ctx, createTableIfNotExistsQuery); err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed while executing query", + Command: "create_migrations_table_if_not_exists", + Query: []byte(createTableIfNotExistsQuery), + } + } + + return nil +} + +func (driver *sqlite) Apply(migration *models.Migration, saveVersion bool) (err error) { + if err = driver.lock(); err != nil { + return err + } + defer func() { + _ = driver.unlock() + }() + + query := migration.Query() + + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + + transaction, err := driver.conn.BeginTx(ctx, nil) + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "error while opening a transaction to the database", + Command: "begin_transaction", + } + } + + if err = execTransaction(transaction, query); err != nil { + return err + } + + if saveVersion { + updateVersionQuery := driver.addMigrationQuery(migration) + if err = execTransaction(transaction, updateVersionQuery); err != nil { + return err + } + } + + err = transaction.Commit() + if err != nil { + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "error while committing a transaction to the database", + Command: "commit_transaction", + } + } + + return nil +} + +func (driver *sqlite) AppliedMigrations() (migrations []*models.Migration, err error) { + if driver.conn 
== nil { + return nil, &drivers.AppError{ + OrigErr: errors.New("driver has no connection established"), + Message: "database connection is missing", + Driver: driverName, + } + } + + if err = driver.lock(); err != nil { + return nil, err + } + defer func() { + _ = driver.unlock() + }() + + if err := driver.createSchemaTableIfNotExists(); err != nil { + return nil, err + } + + query := fmt.Sprintf("SELECT version, name FROM %s", driver.config.MigrationsTable) + ctx, cancel := drivers.GetContext(driver.config.StatementTimeoutInSecs) + defer cancel() + var appliedMigrations []*models.Migration + var version uint32 + var name string + + rows, err := driver.conn.QueryContext(ctx, query) + if err != nil { + return nil, &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to fetch applied migrations", + Command: "select_applied_migrations", + Query: []byte(query), + } + } + defer rows.Close() + + for rows.Next() { + if err := rows.Scan(&version, &name); err != nil { + return nil, &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed to scan applied migration row", + Command: "scan_applied_migrations", + } + } + + appliedMigrations = append(appliedMigrations, &models.Migration{ + Name: name, + Version: version, + Direction: models.Up, + }) + } + + return appliedMigrations, nil +} + +func mergeConfigWithParams(params map[string]string, config *driverConfig) (*driverConfig, error) { + var err error + + for _, configKey := range configParams { + if v, ok := params[configKey]; ok { + switch configKey { + case "x-migration-max-size": + if config.MigrationMaxSize, err = strconv.Atoi(v); err != nil { + return nil, errors.New(fmt.Sprintf("failed to cast config param %s of %s", configKey, v)) + } + case "x-migrations-table": + config.MigrationsTable = v + case "x-statement-timeout": + if config.StatementTimeoutInSecs, err = strconv.Atoi(v); err != nil { + return nil, errors.New(fmt.Sprintf("failed to cast config param %s of 
%s", configKey, v)) + } + } + } + } + + return config, nil +} + +func (driver *sqlite) addMigrationQuery(migration *models.Migration) string { + if migration.Direction == models.Down { + return fmt.Sprintf("DELETE FROM %s WHERE (Version=%d AND NAME='%s')", driver.config.MigrationsTable, migration.Version, migration.Name) + } + return fmt.Sprintf("INSERT INTO %s (Version, Name) VALUES (%d, '%s')", driver.config.MigrationsTable, migration.Version, migration.Name) +} + +func (driver *sqlite) SetConfig(key string, value interface{}) error { + if driver.config != nil { + switch key { + case "StatementTimeoutInSecs": + n, ok := value.(int) + if ok { + driver.config.StatementTimeoutInSecs = n + return nil + } + return fmt.Errorf("incorrect value type for %s", key) + case "MigrationsTable": + n, ok := value.(string) + if ok { + driver.config.MigrationsTable = n + return nil + } + return fmt.Errorf("incorrect value type for %s", key) + } + } + + return fmt.Errorf("incorrect key name %q", key) +} + +func execTransaction(transaction *sql.Tx, query string) error { + if _, err := transaction.Exec(query); err != nil { + if txErr := transaction.Rollback(); txErr != nil { + err = errors.Wrap(errors.New(err.Error()+txErr.Error()), "failed to execute query in migration transaction") + + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Command: "rollback_transaction", + } + } + + return &drivers.DatabaseError{ + OrigErr: err, + Driver: driverName, + Message: "failed when applying migration", + Command: "apply_migration", + Query: []byte(query), + } + } + + return nil +} diff --git a/vendor/github.com/mattermost/morph/drivers/sqlite/utils.go b/vendor/github.com/mattermost/morph/drivers/sqlite/utils.go new file mode 100644 index 00000000..7faf629e --- /dev/null +++ b/vendor/github.com/mattermost/morph/drivers/sqlite/utils.go @@ -0,0 +1,13 @@ +package sqlite + +import "github.com/mattermost/morph/drivers" + +func getDefaultConfig() *driverConfig { + return 
// ExtractCustomParams scans the connection string for each of the given
// custom parameter names and returns a map holding the values that were
// found. Names that do not occur in the connection string are simply absent
// from the result. The error result is always nil today; it is kept for
// interface stability.
func ExtractCustomParams(conn string, params []string) (map[string]string, error) {
	extracted := make(map[string]string, len(params))

	for _, key := range params {
		// Values are matched as a run of word characters following "key=".
		matcher := regexp.MustCompile(key + `=(\w+)`)
		if m := matcher.FindStringSubmatch(conn); len(m) > 1 {
			extracted[key] = m[1]
		}
	}

	return extracted, nil
}
+ } + return context.WithCancel(context.Background()) +} diff --git a/vendor/github.com/mattermost/morph/models/migration.go b/vendor/github.com/mattermost/morph/models/migration.go new file mode 100644 index 00000000..27d40f31 --- /dev/null +++ b/vendor/github.com/mattermost/morph/models/migration.go @@ -0,0 +1,56 @@ +package models + +import ( + "bytes" + "fmt" + "io" + "strconv" +) + +type Migration struct { + Bytes []byte + Name string + RawName string + Version uint32 + Direction Direction +} + +func NewMigration(migrationBytes io.ReadCloser, fileName string) (*Migration, error) { + m := Regex.FindStringSubmatch(fileName) + + var ( + versionUint64 uint64 + direction Direction + identifier string + err error + ) + + if len(m) == 5 { + versionUint64, err = strconv.ParseUint(m[1], 10, 64) + if err != nil { + return nil, err + } + identifier = m[2] + direction = Direction(m[3]) + } else { + return nil, fmt.Errorf("could not parse file: %s", fileName) + } + + buf := new(bytes.Buffer) + if _, err := buf.ReadFrom(migrationBytes); err != nil { + return nil, err + } + defer migrationBytes.Close() + + return &Migration{ + Version: uint32(versionUint64), + Name: identifier, + RawName: fileName, + Bytes: buf.Bytes(), + Direction: direction, + }, nil +} + +func (m *Migration) Query() string { + return string(m.Bytes) +} diff --git a/vendor/github.com/mattermost/morph/models/parse.go b/vendor/github.com/mattermost/morph/models/parse.go new file mode 100644 index 00000000..cae352f2 --- /dev/null +++ b/vendor/github.com/mattermost/morph/models/parse.go @@ -0,0 +1,24 @@ +package models + +import ( + "fmt" + "regexp" +) + +// Direction is either up or down. 
+type Direction string + +const ( + Down Direction = "down" + Up Direction = "up" +) + +var ( + ErrParse = fmt.Errorf("no match") +) + +// Regex matches the following pattern: +// +// 123_name.up.ext +// 123_name.down.ext +var Regex = regexp.MustCompile(`^([0-9]+)_(.*)\.(` + string(Down) + `|` + string(Up) + `)\.(.*)$`) diff --git a/vendor/github.com/mattermost/morph/models/plan.go b/vendor/github.com/mattermost/morph/models/plan.go new file mode 100644 index 00000000..2859705a --- /dev/null +++ b/vendor/github.com/mattermost/morph/models/plan.go @@ -0,0 +1,35 @@ +package models + +import "errors" + +const CurrentPlanVersion = 1 + +var ErrInvalidPlanVersion = errors.New("invalid plan version") + +type Plan struct { + // Version is the version of the plan. + Version int + // Auto is the mode of the plan. If true, the plan will rollback automatically in case of an error. + Auto bool + // Migrations is the list of migrations to be applied. + Migrations []*Migration + // RevertMigrations is the list of migrations to be applied in case of an error. 
+ RevertMigrations []*Migration +} + +func NewPlan(migrations, rollback []*Migration, auto bool) *Plan { + return &Plan{ + Version: CurrentPlanVersion, + Migrations: migrations, + RevertMigrations: rollback, + Auto: auto, + } +} + +func (p *Plan) Validate() error { + if p.Version != CurrentPlanVersion { + return ErrInvalidPlanVersion + } + + return nil +} diff --git a/vendor/github.com/mattermost/morph/morph.go b/vendor/github.com/mattermost/morph/morph.go new file mode 100644 index 00000000..f540789b --- /dev/null +++ b/vendor/github.com/mattermost/morph/morph.go @@ -0,0 +1,539 @@ +package morph + +import ( + "context" + "errors" + "fmt" + "log" + "os" + "sort" + "strings" + "sync" + "time" + + "github.com/mattermost/morph/models" + + "github.com/mattermost/morph/drivers" + "github.com/mattermost/morph/sources" + + ms "github.com/mattermost/morph/drivers/mysql" + ps "github.com/mattermost/morph/drivers/postgres" + + _ "github.com/mattermost/morph/sources/embedded" + _ "github.com/mattermost/morph/sources/file" +) + +var ( + migrationProgressStart = "== %s: migrating (%s) =============================================" + migrationProgressFinished = "== %s: migrated (%s) ========================================" + migrationInterceptor = "== %s: running pre-migration function ==================================" +) + +const maxProgressLogLength = 100 + +type Morph struct { + config *Config + driver drivers.Driver + source sources.Source + mutex drivers.Locker + + interceptorLock sync.Mutex + intercecptorsUp map[int]Interceptor + intercecptorsDown map[int]Interceptor +} + +type Config struct { + Logger Logger + LockKey string + DryRun bool +} + +type EngineOption func(*Morph) error + +// Interceptor is a handler function that being called just before the migration +// applied. If the interceptor returns an error, migration will be aborted. 
+type Interceptor func() error + +func WithLogger(logger Logger) EngineOption { + return func(m *Morph) error { + m.config.Logger = logger + return nil + } +} + +func SetMigrationTableName(name string) EngineOption { + return func(m *Morph) error { + return m.driver.SetConfig("MigrationsTable", name) + } +} + +func SetStatementTimeoutInSeconds(n int) EngineOption { + return func(m *Morph) error { + return m.driver.SetConfig("StatementTimeoutInSecs", n) + } +} + +// WithLock creates a lock table in the database so that the migrations are +// guaranteed to be executed from a single instance. The key is used for naming +// the mutex. +func WithLock(key string) EngineOption { + return func(m *Morph) error { + m.config.LockKey = key + return nil + } +} + +// SetDryRun will not execute any migrations if set to true, but +// will still log the migrations that would be executed. +func SetDryRun(enable bool) EngineOption { + return func(m *Morph) error { + m.config.DryRun = enable + return nil + } +} + +// New creates a new instance of the migrations engine from an existing db instance and a migrations source. +// If the driver implements the Lockable interface, it will also wait until it has acquired a lock. +// The context is propagated to the drivers lock method (if the driver implements divers.Locker interface) and +// it can be used to cancel the lock acquisition. 
+func New(ctx context.Context, driver drivers.Driver, source sources.Source, options ...EngineOption) (*Morph, error) { + engine := &Morph{ + config: &Config{ + Logger: newColorLogger(log.New(os.Stderr, "", log.LstdFlags)), // add default logger + }, + source: source, + driver: driver, + intercecptorsUp: make(map[int]Interceptor), + intercecptorsDown: make(map[int]Interceptor), + } + + for _, option := range options { + if err := option(engine); err != nil { + return nil, fmt.Errorf("could not apply option: %w", err) + } + } + + if err := driver.Ping(); err != nil { + return nil, err + } + + if impl, ok := driver.(drivers.Lockable); ok && engine.config.LockKey != "" { + var mx drivers.Locker + var err error + switch impl.DriverName() { + case "mysql": + mx, err = ms.NewMutex(engine.config.LockKey, driver, engine.config.Logger) + case "postgres": + mx, err = ps.NewMutex(engine.config.LockKey, driver, engine.config.Logger) + default: + err = errors.New("driver does not support locking") + } + if err != nil { + return nil, err + } + + engine.mutex = mx + err = mx.Lock(ctx) + if err != nil { + return nil, err + } + } + + return engine, nil +} + +// Close closes the underlying database connection of the engine. 
+func (m *Morph) Close() error { + if m.mutex != nil { + err := m.mutex.Unlock() + if err != nil { + return err + } + } + + return m.driver.Close() +} + +func (m *Morph) apply(migration *models.Migration, saveVersion, dryRun bool) error { + start := time.Now() + migrationName := migration.Name + direction := migration.Direction + f := m.getInterceptor(migration) + if f != nil { + m.config.Logger.Println(formatProgress(fmt.Sprintf(migrationInterceptor, migrationName))) + err := f() + if err != nil { + return err + } + } + m.config.Logger.Println(formatProgress(fmt.Sprintf(migrationProgressStart, migrationName, direction))) + if !dryRun { + if err := m.driver.Apply(migration, saveVersion); err != nil { + return err + } + } + + elapsed := time.Since(start) + m.config.Logger.Println(formatProgress(fmt.Sprintf(migrationProgressFinished, migrationName, fmt.Sprintf("%.4fs", elapsed.Seconds())))) + + return nil +} + +// ApplyAll applies all pending migrations. +func (m *Morph) ApplyAll() error { + _, err := m.Apply(-1) + return err +} + +// Applies limited number of migrations upwards. 
+func (m *Morph) Apply(limit int) (int, error) { + appliedMigrations, err := m.driver.AppliedMigrations() + if err != nil { + return -1, err + } + + pendingMigrations, err := computePendingMigrations(appliedMigrations, m.source.Migrations()) + if err != nil { + return -1, err + } + + migrations := make([]*models.Migration, 0) + sortedMigrations := sortMigrations(pendingMigrations) + + for _, migration := range sortedMigrations { + if migration.Direction != models.Up { + continue + } + migrations = append(migrations, migration) + } + + steps := limit + if len(migrations) < steps { + return -1, fmt.Errorf("there are only %d migrations available, but you requested %d", len(migrations), steps) + } + + if limit < 0 { + steps = len(migrations) + } + + var applied int + for i := 0; i < steps; i++ { + if err := m.apply(migrations[i], true, m.config.DryRun); err != nil { + return applied, err + } + applied++ + } + + return applied, nil +} + +// ApplyDown rollbacks a limited number of migrations +// if limit is given below zero, all down scripts are going to be applied. +func (m *Morph) ApplyDown(limit int) (int, error) { + appliedMigrations, err := m.driver.AppliedMigrations() + if err != nil { + return -1, err + } + + sortedMigrations := reverseSortMigrations(appliedMigrations) + downMigrations, err := findDownScripts(sortedMigrations, m.source.Migrations()) + if err != nil { + return -1, err + } + + steps := limit + if len(sortedMigrations) < steps { + return -1, fmt.Errorf("there are only %d migrations available, but you requested %d", len(sortedMigrations), steps) + } + + if limit < 0 { + steps = len(sortedMigrations) + } + + var applied int + for i := 0; i < steps; i++ { + migrationName := sortedMigrations[i].Name + if err := m.apply(downMigrations[migrationName], true, m.config.DryRun); err != nil { + return applied, err + } + applied++ + } + + return applied, nil +} + +// Diff returns the difference between the applied migrations and the available migrations. 
+func (m *Morph) Diff(mode models.Direction) ([]*models.Migration, error) { + appliedMigrations, err := m.driver.AppliedMigrations() + if err != nil { + return nil, err + } + + if mode == models.Down { + sortedMigrations := reverseSortMigrations(appliedMigrations) + downMigrations, err := findDownScripts(sortedMigrations, m.source.Migrations()) + if err != nil { + return nil, err + } + + diff := make([]*models.Migration, 0, len(downMigrations)) + for i := 0; i < len(sortedMigrations); i++ { + diff = append(diff, downMigrations[sortedMigrations[i].Name]) + } + + return diff, nil + } + + pendingMigrations, err := computePendingMigrations(appliedMigrations, m.source.Migrations()) + if err != nil { + return nil, err + } + + var diff []*models.Migration + for _, migration := range sortMigrations(pendingMigrations) { + if migration.Direction != models.Up { + continue + } + diff = append(diff, migration) + } + + return diff, nil +} + +func (m *Morph) GetOppositeMigrations(migrations []*models.Migration) ([]*models.Migration, error) { + var direction models.Direction + migrationsMap := make(map[string]*models.Migration) + for _, migration := range migrations { + if direction == "" { + direction = migration.Direction + } + // check if the migrations has the same direction + if direction != migration.Direction { + return nil, errors.New("migrations have different directions") + } + + migrationsMap[migration.Name] = migration + } + + rollbackMigrations := make([]*models.Migration, 0, len(migrations)) + availableMigrations := m.source.Migrations() + for _, migration := range availableMigrations { + // skip if we have the same direction for the migration + // we are looking for opposite direction + if migration.Direction == direction { + continue + } + + // we don't have the migration in the map + // so we can't rollback it + _, ok := migrationsMap[migration.Name] + if !ok { + continue + } + + rollbackMigrations = append(rollbackMigrations, migration) + } + + if len(migrations) 
!= len(rollbackMigrations) { + return nil, errors.New("not all migrations have opposite migrations") + } + + return rollbackMigrations, nil +} + +// GeneratePlan returns the plan to apply these migrations and also includes +// the safe rollback steps for the given migrations. +func (m *Morph) GeneratePlan(migrations []*models.Migration, auto bool) (*models.Plan, error) { + rollbackMigrations, err := m.GetOppositeMigrations(migrations) + if err != nil { + return nil, fmt.Errorf("could not get opposite migrations: %w", err) + } + + plan := models.NewPlan(migrations, rollbackMigrations, auto) + + return plan, nil +} + +func (m *Morph) ApplyPlan(plan *models.Plan) error { + if err := plan.Validate(); err != nil { + return fmt.Errorf("invalid plan: %w", err) + } + + revertMigrations := make([]*models.Migration, 0, len(plan.RevertMigrations)) + var err error + var failIndex int + + for i := range plan.Migrations { + // add to the revert queue + for _, migration := range plan.RevertMigrations { + if migration.Name == plan.Migrations[i].Name && migration.Version == plan.Migrations[i].Version { + revertMigrations = append(revertMigrations, migration) + break + } + } + + err = m.apply(plan.Migrations[i], true, m.config.DryRun) + if err != nil { + break + } + + failIndex = i + } + + if err == nil { + return nil + } + + if !plan.Auto { + return err + } + + m.config.Logger.Printf("migration %s failed, starting rollback", plan.Migrations[failIndex].Name) + + for j := len(revertMigrations) - 1; j >= 0; j-- { + // There is a special case when we are reverting a rollback + // We shouldn't save the version if we are trying to restore the last applied migration + // here is an example, lets say we have following migrations in the applied migrations table: + // migration_1, migration_2, migration_3 + // Once we initiate the rollback, we will have the following: + // migration_3, migration_2, migration_1 (to rollback) + // Let's say we have a bug in migration_2 and failed. 
+ // We don't remove that version from the database, because migration is not successfully rolled back. + // So in this case, we need to apply the migration_2 (up) but it will be in the migrations table. + // Therefore we are not saving the version in the database because it will fail on the save version step. + skipSave := revertMigrations[j].Direction == models.Up && j == len(revertMigrations)-1 + rErr := m.apply(revertMigrations[j], !skipSave, m.config.DryRun) + if rErr != nil { + return fmt.Errorf("could not rollback migrations after trying to migrate: %w", rErr) + } + + m.config.Logger.Printf("successfully rolled back migration: %s", revertMigrations[j].Name) + } + + // return error in any case + return fmt.Errorf("could not apply migration: %w", err) +} + +// AddInterceptor registers a handler function to be executed before the actual migration +func (m *Morph) AddInterceptor(version int, direction models.Direction, handler Interceptor) { + m.interceptorLock.Lock() + switch direction { + case models.Up: + m.intercecptorsUp[version] = handler + case models.Down: + m.intercecptorsDown[version] = handler + } + m.interceptorLock.Unlock() +} + +// RemoveInterceptor removes the handler function from the engine +func (m *Morph) RemoveInterceptor(version int, direction models.Direction) { + m.interceptorLock.Lock() + switch direction { + case models.Up: + delete(m.intercecptorsUp, version) + case models.Down: + delete(m.intercecptorsDown, version) + } + m.interceptorLock.Unlock() +} + +func (m *Morph) getInterceptor(migration *models.Migration) Interceptor { + m.interceptorLock.Lock() + var f Interceptor + switch migration.Direction { + case models.Up: + fn, ok := m.intercecptorsUp[int(migration.Version)] + if ok { + f = fn + } + case models.Down: + fn, ok := m.intercecptorsDown[int(migration.Version)] + if ok { + f = fn + } + } + m.interceptorLock.Unlock() + return f +} + +// SwapPlanDirection alters the plan direction to the opposite direction. 
+func SwapPlanDirection(plan *models.Plan) { + // we need to ensure that the intended migrations for applying is in the + // correct order. + plan.RevertMigrations = sortMigrations(plan.RevertMigrations) + if len(plan.RevertMigrations) > 0 && plan.RevertMigrations[0].Direction == models.Down { + plan.RevertMigrations = reverseSortMigrations(plan.RevertMigrations) + } + + // we copy the migrations to set them as revert migrations in the plan + migrations := plan.Migrations + plan.Migrations = plan.RevertMigrations + plan.RevertMigrations = migrations +} + +func reverseSortMigrations(migrations []*models.Migration) []*models.Migration { + sort.Slice(migrations, func(i, j int) bool { + return migrations[i].Version > migrations[j].Version + }) + return migrations +} + +func sortMigrations(migrations []*models.Migration) []*models.Migration { + sort.Slice(migrations, func(i, j int) bool { + return migrations[i].RawName < migrations[j].RawName + }) + return migrations +} + +func computePendingMigrations(appliedMigrations []*models.Migration, sourceMigrations []*models.Migration) ([]*models.Migration, error) { + // sourceMigrations has to be greater or equal to databaseMigrations + if len(appliedMigrations) > len(sourceMigrations) { + return nil, errors.New("migration mismatch, there are more migrations applied than those were specified in source") + } + + dict := make(map[string]*models.Migration) + for _, appliedMigration := range appliedMigrations { + dict[appliedMigration.Name] = appliedMigration + } + + var pendingMigrations []*models.Migration + for _, sourceMigration := range sourceMigrations { + if _, ok := dict[sourceMigration.Name]; !ok { + pendingMigrations = append(pendingMigrations, sourceMigration) + } + } + + return pendingMigrations, nil +} + +func findDownScripts(appliedMigrations []*models.Migration, sourceMigrations []*models.Migration) (map[string]*models.Migration, error) { + tmp := make(map[string]*models.Migration) + for _, m := range 
sourceMigrations { + if m.Direction != models.Down { + continue + } + tmp[m.Name] = m + } + + for _, m := range appliedMigrations { + _, ok := tmp[m.Name] + if !ok { + return nil, fmt.Errorf("could not find down script for %s", m.Name) + } + } + + return tmp, nil +} + +func formatProgress(p string) string { + if len(p) < maxProgressLogLength { + return p + strings.Repeat("=", maxProgressLogLength-len(p)) + } + + if len(p) > maxProgressLogLength { + return p[:maxProgressLogLength] + } + + return p +} diff --git a/vendor/github.com/mattermost/morph/sources/embedded/README.md b/vendor/github.com/mattermost/morph/sources/embedded/README.md new file mode 100644 index 00000000..5334c5ca --- /dev/null +++ b/vendor/github.com/mattermost/morph/sources/embedded/README.md @@ -0,0 +1,73 @@ +# embedded source + +This source reads migrations from embedded files, for example using +[go-bindata](github.com/go-bindata/go-bindata) or go embed feature. + +## go embed usage + +To read the embedded data, create a migration source through the +`WithInstance` method and then instantiate `morph`: + +```go +import ( + "embed" + "path/filepath" + + "github.com/mattermost/morph" + "github.com/mattermost/morph/sources/embedded" +) + +//go:embed testfiles +var assets embed.FS + +func main() { + dirEntries, err := assets.ReadDir("testfiles") + if err != nil { + panic(err) + } + + assetNames := make([]string, len(dirEntries)) + for i, dirEntry := range dirEntries { + assetNames[i] = dirEntry.Name() + } + + res := embedded.Resource(assetNames, func(name string) ([]byte, error) { + return assets.ReadFile(filepath.Join("testfiles", name)) + }) + + src, err := embedded.WithInstance(res) + if err != nil { + panic(err) + } + + // create the morph instance from the source and driver + m := morph.NewFromConnURL("postgres://...", src, opts) +} +``` + +## go-bindata usage + +To read the embedded data, create a migration source through the +`WithInstance` method and then instantiate `morph`: + +```go 
+import ( + "github.com/mattermost/morph" + "github.com/mattermost/morph/sources/embedded" + "github.com/mattermost/morph/sources/embedded/testdata" +) + +func main() { + res := embedded.Resource(testdata.AssetNames(), func(name string) ([]byte, error) { + return testdata.Asset(name) + }) + + src, err := embedded.WithInstance(res) + if err != nil { + panic(err) + } + + // create the morph instance from the source and driver + m := morph.NewFromConnURL("postgres://...", src, opts) +} +``` diff --git a/vendor/github.com/mattermost/morph/sources/embedded/embedded.go b/vendor/github.com/mattermost/morph/sources/embedded/embedded.go new file mode 100644 index 00000000..c881431d --- /dev/null +++ b/vendor/github.com/mattermost/morph/sources/embedded/embedded.go @@ -0,0 +1,56 @@ +package embedded + +import ( + "bytes" + "fmt" + "io" + + "github.com/mattermost/morph/models" + "github.com/mattermost/morph/sources" +) + +type AssetFunc func(name string) ([]byte, error) + +func Resource(names []string, fn AssetFunc) *AssetSource { + return &AssetSource{ + Names: names, + AssetFunc: fn, + } +} + +type AssetSource struct { + Names []string + AssetFunc AssetFunc +} + +type Embedded struct { + assetSource *AssetSource + migrations []*models.Migration +} + +func WithInstance(assetSource *AssetSource) (sources.Source, error) { + b := &Embedded{ + assetSource: assetSource, + migrations: []*models.Migration{}, + } + + for _, filename := range assetSource.Names { + migrationBytes, err := b.assetSource.AssetFunc(filename) + if err != nil { + return nil, fmt.Errorf("cannot read migration %q: %w", filename, err) + } + + m, err := models.NewMigration(io.NopCloser(bytes.NewReader(migrationBytes)), filename) + if err != nil { + return nil, fmt.Errorf("could not create migration: %w", err) + } + + b.migrations = append(b.migrations, m) + } + + return b, nil +} + +func (b *Embedded) Migrations() []*models.Migration { + return b.migrations +} diff --git 
a/vendor/github.com/mattermost/morph/sources/file/file.go b/vendor/github.com/mattermost/morph/sources/file/file.go new file mode 100644 index 00000000..ef96cc8b --- /dev/null +++ b/vendor/github.com/mattermost/morph/sources/file/file.go @@ -0,0 +1,96 @@ +package file + +import ( + "fmt" + "net/url" + "os" + "path/filepath" + + "github.com/mattermost/morph/models" +) + +type File struct { + url string + path string + migrations []*models.Migration +} + +func Open(sourceURL string) (*File, error) { + uri, err := url.Parse(sourceURL) + if err != nil { + return nil, err + } + + // host might be "." for relative URLs like file://./migrations + p := uri.Opaque + if len(p) == 0 { + p = uri.Host + uri.Path + } + + // if no path provided, default to current directory + if len(p) == 0 { + wd, err := os.Getwd() + if err != nil { + return nil, err + } + p = wd + } else if p[0:1] != "/" { + // make path absolute if required + abs, err := filepath.Abs(p) + if err != nil { + return nil, err + } + p = abs + } + + nf := &File{ + url: sourceURL, + path: p, + } + + if err := nf.readMigrations(); err != nil { + return nil, fmt.Errorf("cannot read migrations in path %q: %w", p, err) + } + + return nf, nil +} + +func (f *File) readMigrations() error { + info, err := os.Stat(f.path) + if err != nil { + return err + } + if !info.IsDir() { + return fmt.Errorf("file %q is not a directory", info.Name()) + } + + migrations := []*models.Migration{} + walkerr := filepath.Walk(f.path, func(path string, info os.FileInfo, _ error) error { + if info.IsDir() { + return nil + } + + file, err := os.Open(path) + if err != nil { + return err + } + + m, err := models.NewMigration(file, filepath.Base(path)) + if err != nil { + return fmt.Errorf("could not create migration: %w", err) + } + + migrations = append(migrations, m) + return nil + }) + if walkerr != nil { + return walkerr + } + + f.migrations = migrations + return nil +} + +func (f *File) Migrations() []*models.Migration { + return f.migrations 
+} diff --git a/vendor/github.com/mattermost/morph/sources/source.go b/vendor/github.com/mattermost/morph/sources/source.go new file mode 100644 index 00000000..b22a14fe --- /dev/null +++ b/vendor/github.com/mattermost/morph/sources/source.go @@ -0,0 +1,9 @@ +package sources + +import ( + "github.com/mattermost/morph/models" +) + +type Source interface { + Migrations() (migrations []*models.Migration) +} diff --git a/vendor/github.com/mattermost/morph/test_helper.go b/vendor/github.com/mattermost/morph/test_helper.go new file mode 100644 index 00000000..40e9ee42 --- /dev/null +++ b/vendor/github.com/mattermost/morph/test_helper.go @@ -0,0 +1,205 @@ +package morph + +import ( + "bytes" + "context" + "database/sql" + "fmt" + "os" + "path/filepath" + "testing" + "text/template" + "time" + + "github.com/mattermost/morph/drivers" + "github.com/mattermost/morph/drivers/mysql" + "github.com/mattermost/morph/drivers/postgres" + "github.com/mattermost/morph/drivers/sqlite" + "github.com/mattermost/morph/models" + "github.com/mattermost/morph/sources" + "github.com/mattermost/morph/testlib" + "github.com/stretchr/testify/require" +) + +const ( + defaultPostgresDSN = "postgres://morph:morph@localhost:6432/morph_test?sslmode=disable" + defaultMySQLDSN = "morph:morph@tcp(127.0.0.1:3307)/morph_test?multiStatements=true" +) + +// query is a map of driver name to a map of direction for the dummy queries +var queries = map[string]map[models.Direction]string{ + "postgres": { + models.Up: `CREATE TABLE IF NOT EXISTS {{.Name}} (id serial PRIMARY KEY, name text)`, + models.Down: `DROP TABLE IF EXISTS {{.Name}}`, + }, + "mysql": { + models.Up: `CREATE TABLE IF NOT EXISTS {{.Name}} (id int(11) NOT NULL AUTO_INCREMENT, name varchar(255), PRIMARY KEY (id))`, + models.Down: `DROP TABLE IF EXISTS {{.Name}}`, + }, + "sqlite": { + models.Up: `CREATE TABLE IF NOT EXISTS {{.Name}} (id integer PRIMARY KEY AUTOINCREMENT, name text)`, + models.Down: `DROP TABLE IF EXISTS {{.Name}}`, + }, +} + +// 
testHelper is a helper struct for testing morph engine. +// It contains all the necessary information to run tests for all drivers. +// It also provides helper functions to create dummy migrations. +type testHelper struct { + drivers map[string]drivers.Driver + dbInstances map[string]*sql.DB + sqliteFile string + options []EngineOption + migrations map[string][]*models.Migration +} + +// testSource is a dummy source for testing purposes. +type testSource struct { + migrations []*models.Migration +} + +func (s *testSource) Migrations() []*models.Migration { + return s.migrations +} + +// source returns a dummy source for the given driver +func (h *testHelper) source(driverName string) sources.Source { + src := &testSource{ + migrations: h.migrations[driverName], + } + + return src +} + +func newTestHelper(t *testing.T, options ...EngineOption) *testHelper { + helper := &testHelper{ + options: options, + drivers: map[string]drivers.Driver{}, + migrations: map[string][]*models.Migration{}, + dbInstances: map[string]*sql.DB{}, + } + + helper.initializeDrivers(t) + + return helper +} + +// creates 3 new migrations +func (h *testHelper) CreateBasicMigrations(t *testing.T) *testHelper { + h.AddMigration(t, "create_table_1") + h.AddMigration(t, "create_table_2") + h.AddMigration(t, "create_table_3") + + return h +} + +// AddMigration adds a dummy migration to the test helper. It is important to add +// migrations before running the RunForAllDrivers function as migrations are registered +// before the test function is run. 
+func (h *testHelper) AddMigration(t *testing.T, migrationName string) { + // Just generate a random name + tableName := fmt.Sprintf("test_%s_%d", migrationName, time.Now().Unix()) + for name := range h.drivers { + v := 1 + uint32(len(h.migrations[name])) + h.migrations[name] = append(h.migrations[name], &models.Migration{ + Name: migrationName, + Direction: models.Up, + Version: v, + Bytes: getMigration(t, name, models.Up, tableName), + RawName: fmt.Sprintf("%d_%s.up.sql", v, migrationName), + }) + h.migrations[name] = append(h.migrations[name], &models.Migration{ + Name: migrationName, + Direction: models.Down, + Version: v, + Bytes: getMigration(t, name, models.Down, tableName), + RawName: fmt.Sprintf("%d_%s.down.sql", v, migrationName), + }) + } +} + +// getMigration returns a dummy migration for the given driver and direction +func getMigration(t *testing.T, driver string, direction models.Direction, tableName string) []byte { + tmp, err := template.New("query").Parse(queries[driver][direction]) + require.NoError(t, err) + + var b bytes.Buffer + err = tmp.Execute(&b, struct{ Name string }{Name: tableName}) + require.NoError(t, err) + + return b.Bytes() +} + +// RunForAllDrivers runs the given test function for all drivers of the test helper +func (h *testHelper) RunForAllDrivers(t *testing.T, f func(*testing.T, *Morph), name ...string) { + var testName string + if len(name) > 0 { + testName = name[0] + "/" + } + + for name, driver := range h.drivers { + t.Run(testName+name, func(t *testing.T) { + engine, err := New(context.Background(), driver, h.source(name), h.options...) 
+ require.NoError(t, err) + + f(t, engine) + }) + } +} + +// TearDown closes all database connections and removes all tables from the databases +func (h *testHelper) Teardown(t *testing.T) { + assets := testlib.Assets() + for name, driver := range h.drivers { + b, err := assets.ReadFile(filepath.Join("scripts", name+"_drop_all_tables.sql")) + require.NoError(t, err) + migration := &models.Migration{ + Bytes: b, + } + err = driver.Apply(migration, false) + require.NoError(t, err) + } + + for _, instance := range h.dbInstances { + err := instance.Close() + require.NoError(t, err) + } + + err := os.RemoveAll(h.sqliteFile) + require.NoError(t, err) +} + +func (h *testHelper) initializeDrivers(t *testing.T) { + // postgres + db, err := sql.Open("postgres", defaultPostgresDSN) + require.NoError(t, err) + + pgDriver, err := postgres.WithInstance(db) + require.NoError(t, err) + h.drivers["postgres"] = pgDriver + h.dbInstances["postgres"] = db + + // mysql + db2, err := sql.Open("mysql", defaultMySQLDSN) + require.NoError(t, err) + + mysqlDriver, err := mysql.WithInstance(db2) + require.NoError(t, err) + h.drivers["mysql"] = mysqlDriver + h.dbInstances["mysql"] = db2 + + // sqlite + testDBFile, err := os.CreateTemp("", "morph-test.db") + require.NoError(t, err) + tfInfo, err := testDBFile.Stat() + require.NoError(t, err) + h.sqliteFile = filepath.Join(os.TempDir(), tfInfo.Name()) + + db3, err := sql.Open("sqlite", h.sqliteFile) + require.NoError(t, err) + + sqliteDriver, err := sqlite.WithInstance(db3) + require.NoError(t, err) + h.drivers["sqlite"] = sqliteDriver + h.dbInstances["sqlite"] = db3 +} diff --git a/vendor/github.com/mattermost/morph/testlib/assets.go b/vendor/github.com/mattermost/morph/testlib/assets.go new file mode 100644 index 00000000..193bfd95 --- /dev/null +++ b/vendor/github.com/mattermost/morph/testlib/assets.go @@ -0,0 +1,10 @@ +package testlib + +import "embed" + +//go:embed scripts +var assets embed.FS + +func Assets() embed.FS { + return assets +} 
diff --git a/vendor/github.com/mattermost/morph/testlib/scripts/mysql_drop_all_tables.sql b/vendor/github.com/mattermost/morph/testlib/scripts/mysql_drop_all_tables.sql new file mode 100644 index 00000000..0d2f2c3a --- /dev/null +++ b/vendor/github.com/mattermost/morph/testlib/scripts/mysql_drop_all_tables.sql @@ -0,0 +1,34 @@ +DROP PROCEDURE IF EXISTS drop_all_tables; + +CREATE PROCEDURE drop_all_tables() +BEGIN + DECLARE _done INT DEFAULT FALSE; + DECLARE _tableName VARCHAR(255); + DECLARE _cursor CURSOR FOR + SELECT table_name + FROM information_schema.TABLES + WHERE table_schema = SCHEMA(); + DECLARE CONTINUE HANDLER FOR NOT FOUND SET _done = TRUE; + + SET FOREIGN_KEY_CHECKS = 0; + + OPEN _cursor; + + REPEAT FETCH _cursor INTO _tableName; + + IF NOT _done THEN + SET @stmt_sql = CONCAT('DROP TABLE ', _tableName); + PREPARE stmt1 FROM @stmt_sql; + EXECUTE stmt1; + DEALLOCATE PREPARE stmt1; + END IF; + + UNTIL _done END REPEAT; + + CLOSE _cursor; + SET FOREIGN_KEY_CHECKS = 1; +END; + +call drop_all_tables(); + +DROP PROCEDURE IF EXISTS drop_all_tables; diff --git a/vendor/github.com/mattermost/morph/testlib/scripts/postgres_drop_all_tables.sql b/vendor/github.com/mattermost/morph/testlib/scripts/postgres_drop_all_tables.sql new file mode 100644 index 00000000..748a617a --- /dev/null +++ b/vendor/github.com/mattermost/morph/testlib/scripts/postgres_drop_all_tables.sql @@ -0,0 +1,7 @@ +DO $$ DECLARE + r RECORD; +BEGIN + FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP + EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE'; + END LOOP; +END $$; diff --git a/vendor/github.com/mattermost/morph/testlib/scripts/sqlite_drop_all_tables.sql b/vendor/github.com/mattermost/morph/testlib/scripts/sqlite_drop_all_tables.sql new file mode 100644 index 00000000..cbc1613f --- /dev/null +++ b/vendor/github.com/mattermost/morph/testlib/scripts/sqlite_drop_all_tables.sql @@ -0,0 +1,5 @@ +PRAGMA writable_schema = 1; +delete from 
sqlite_master where type in ('table', 'index', 'trigger'); +PRAGMA writable_schema = 0; + +PRAGMA INTEGRITY_CHECK; diff --git a/vendor/github.com/mattermost/rsc/LICENSE b/vendor/github.com/mattermost/rsc/LICENSE new file mode 100644 index 00000000..6a66aea5 --- /dev/null +++ b/vendor/github.com/mattermost/rsc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/mattermost/rsc/gf256/Makefile b/vendor/github.com/mattermost/rsc/gf256/Makefile new file mode 100644 index 00000000..518a034f --- /dev/null +++ b/vendor/github.com/mattermost/rsc/gf256/Makefile @@ -0,0 +1,8 @@ +# Copyright 2010 The Go Authors. All rights reserved. +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file. + +include $(GOROOT)/src/Make.inc +TARG=rsc.googlecode.com/hg/gf256 +GOFILES=gf256.go #rs.go +include $(GOROOT)/src/Make.pkg diff --git a/vendor/github.com/mattermost/rsc/gf256/gf256.go b/vendor/github.com/mattermost/rsc/gf256/gf256.go new file mode 100644 index 00000000..34cc975a --- /dev/null +++ b/vendor/github.com/mattermost/rsc/gf256/gf256.go @@ -0,0 +1,241 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gf256 implements arithmetic over the Galois Field GF(256). +package gf256 + +import "strconv" + +// A Field represents an instance of GF(256) defined by a specific polynomial. +type Field struct { + log [256]byte // log[0] is unused + exp [510]byte +} + +// NewField returns a new field corresponding to the polynomial poly +// and generator α. The Reed-Solomon encoding in QR codes uses +// polynomial 0x11d with generator 2. +// +// The choice of generator α only affects the Exp and Log operations. 
+func NewField(poly, α int) *Field { + if poly < 0x100 || poly >= 0x200 || reducible(poly) { + panic("gf256: invalid polynomial: " + strconv.Itoa(poly)) + } + + var f Field + x := 1 + for i := 0; i < 255; i++ { + if x == 1 && i != 0 { + panic("gf256: invalid generator " + strconv.Itoa(α) + + " for polynomial " + strconv.Itoa(poly)) + } + f.exp[i] = byte(x) + f.exp[i+255] = byte(x) + f.log[x] = byte(i) + x = mul(x, α, poly) + } + f.log[0] = 255 + for i := 0; i < 255; i++ { + if f.log[f.exp[i]] != byte(i) { + panic("bad log") + } + if f.log[f.exp[i+255]] != byte(i) { + panic("bad log") + } + } + for i := 1; i < 256; i++ { + if f.exp[f.log[i]] != byte(i) { + panic("bad log") + } + } + + return &f +} + +// nbit returns the number of significant in p. +func nbit(p int) uint { + n := uint(0) + for ; p > 0; p >>= 1 { + n++ + } + return n +} + +// polyDiv divides the polynomial p by q and returns the remainder. +func polyDiv(p, q int) int { + np := nbit(p) + nq := nbit(q) + for ; np >= nq; np-- { + if p&(1<<(np-1)) != 0 { + p ^= q << (np - nq) + } + } + return p +} + +// mul returns the product x*y mod poly, a GF(256) multiplication. +func mul(x, y, poly int) int { + z := 0 + for x > 0 { + if x&1 != 0 { + z ^= y + } + x >>= 1 + y <<= 1 + if y&0x100 != 0 { + y ^= poly + } + } + return z +} + +// reducible reports whether p is reducible. +func reducible(p int) bool { + // Multiplying n-bit * n-bit produces (2n-1)-bit, + // so if p is reducible, one of its factors must be + // of np/2+1 bits or fewer. + np := nbit(p) + for q := 2; q < 1<<(np/2+1); q++ { + if polyDiv(p, q) == 0 { + return true + } + } + return false +} + +// Add returns the sum of x and y in the field. +func (f *Field) Add(x, y byte) byte { + return x ^ y +} + +// Exp returns the the base-α exponential of e in the field. +// If e < 0, Exp returns 0. +func (f *Field) Exp(e int) byte { + if e < 0 { + return 0 + } + return f.exp[e%255] +} + +// Log returns the base-α logarithm of x in the field. 
+// If x == 0, Log returns -1. +func (f *Field) Log(x byte) int { + if x == 0 { + return -1 + } + return int(f.log[x]) +} + +// Inv returns the multiplicative inverse of x in the field. +// If x == 0, Inv returns 0. +func (f *Field) Inv(x byte) byte { + if x == 0 { + return 0 + } + return f.exp[255-f.log[x]] +} + +// Mul returns the product of x and y in the field. +func (f *Field) Mul(x, y byte) byte { + if x == 0 || y == 0 { + return 0 + } + return f.exp[int(f.log[x])+int(f.log[y])] +} + +// An RSEncoder implements Reed-Solomon encoding +// over a given field using a given number of error correction bytes. +type RSEncoder struct { + f *Field + c int + gen []byte + lgen []byte + p []byte +} + +func (f *Field) gen(e int) (gen, lgen []byte) { + // p = 1 + p := make([]byte, e+1) + p[e] = 1 + + for i := 0; i < e; i++ { + // p *= (x + Exp(i)) + // p[j] = p[j]*Exp(i) + p[j+1]. + c := f.Exp(i) + for j := 0; j < e; j++ { + p[j] = f.Mul(p[j], c) ^ p[j+1] + } + p[e] = f.Mul(p[e], c) + } + + // lp = log p. + lp := make([]byte, e+1) + for i, c := range p { + if c == 0 { + lp[i] = 255 + } else { + lp[i] = byte(f.Log(c)) + } + } + + return p, lp +} + +// NewRSEncoder returns a new Reed-Solomon encoder +// over the given field and number of error correction bytes. +func NewRSEncoder(f *Field, c int) *RSEncoder { + gen, lgen := f.gen(c) + return &RSEncoder{f: f, c: c, gen: gen, lgen: lgen} +} + +// ECC writes to check the error correcting code bytes +// for data using the given Reed-Solomon parameters. +func (rs *RSEncoder) ECC(data []byte, check []byte) { + if len(check) < rs.c { + panic("gf256: invalid check byte length") + } + if rs.c == 0 { + return + } + + // The check bytes are the remainder after dividing + // data padded with c zeros by the generator polynomial. + + // p = data padded with c zeros. 
+ var p []byte + n := len(data) + rs.c + if len(rs.p) >= n { + p = rs.p + } else { + p = make([]byte, n) + } + copy(p, data) + for i := len(data); i < len(p); i++ { + p[i] = 0 + } + + // Divide p by gen, leaving the remainder in p[len(data):]. + // p[0] is the most significant term in p, and + // gen[0] is the most significant term in the generator, + // which is always 1. + // To avoid repeated work, we store various values as + // lv, not v, where lv = log[v]. + f := rs.f + lgen := rs.lgen[1:] + for i := 0; i < len(data); i++ { + c := p[i] + if c == 0 { + continue + } + q := p[i+1:] + exp := f.exp[f.log[c]:] + for j, lg := range lgen { + if lg != 255 { // lgen uses 255 for log 0 + q[j] ^= exp[lg] + } + } + } + copy(check, p[len(data):]) + rs.p = p +} diff --git a/vendor/github.com/mattermost/rsc/qr/Makefile b/vendor/github.com/mattermost/rsc/qr/Makefile new file mode 100644 index 00000000..d00c470b --- /dev/null +++ b/vendor/github.com/mattermost/rsc/qr/Makefile @@ -0,0 +1,4 @@ +include $(GOROOT)/src/Make.inc +TARG=rsc.googlecode.com/hg/qr +GOFILES=qr.go png.go +include $(GOROOT)/src/Make.pkg diff --git a/vendor/github.com/mattermost/rsc/qr/coding/Makefile b/vendor/github.com/mattermost/rsc/qr/coding/Makefile new file mode 100644 index 00000000..5d1c4d30 --- /dev/null +++ b/vendor/github.com/mattermost/rsc/qr/coding/Makefile @@ -0,0 +1,7 @@ +include $(GOROOT)/src/Make.inc + +TARG=rsc.googlecode.com/hg/qr/coding +GOFILES=\ + qr.go\ + +include $(GOROOT)/src/Make.pkg diff --git a/vendor/github.com/mattermost/rsc/qr/coding/qr.go b/vendor/github.com/mattermost/rsc/qr/coding/qr.go new file mode 100644 index 00000000..35711a4e --- /dev/null +++ b/vendor/github.com/mattermost/rsc/qr/coding/qr.go @@ -0,0 +1,815 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package coding implements low-level QR coding details. 
+package coding + +import ( + "fmt" + "strconv" + "strings" + + "github.com/mattermost/rsc/gf256" +) + +// Field is the field for QR error correction. +var Field = gf256.NewField(0x11d, 2) + +// A Version represents a QR version. +// The version specifies the size of the QR code: +// a QR code with version v has 4v+17 pixels on a side. +// Versions number from 1 to 40: the larger the version, +// the more information the code can store. +type Version int + +const MinVersion = 1 +const MaxVersion = 40 + +func (v Version) String() string { + return strconv.Itoa(int(v)) +} + +func (v Version) sizeClass() int { + if v <= 9 { + return 0 + } + if v <= 26 { + return 1 + } + return 2 +} + +// DataBytes returns the number of data bytes that can be +// stored in a QR code with the given version and level. +func (v Version) DataBytes(l Level) int { + vt := &vtab[v] + lev := &vt.level[l] + return vt.bytes - lev.nblock*lev.check +} + +// Encoding implements a QR data encoding scheme. +// The implementations--Numeric, Alphanumeric, and String--specify +// the character set and the mapping from UTF-8 to code bits. +// The more restrictive the mode, the fewer code bits are needed. +type Encoding interface { + Check() error + Bits(v Version) int + Encode(b *Bits, v Version) +} + +type Bits struct { + b []byte + nbit int +} + +func (b *Bits) Reset() { + b.b = b.b[:0] + b.nbit = 0 +} + +func (b *Bits) Bits() int { + return b.nbit +} + +func (b *Bits) Bytes() []byte { + if b.nbit%8 != 0 { + panic("fractional byte") + } + return b.b +} + +func (b *Bits) Append(p []byte) { + if b.nbit%8 != 0 { + panic("fractional byte") + } + b.b = append(b.b, p...) 
+ b.nbit += 8 * len(p) +} + +func (b *Bits) Write(v uint, nbit int) { + for nbit > 0 { + n := nbit + if n > 8 { + n = 8 + } + if b.nbit%8 == 0 { + b.b = append(b.b, 0) + } else { + m := -b.nbit & 7 + if n > m { + n = m + } + } + b.nbit += n + sh := uint(nbit - n) + b.b[len(b.b)-1] |= uint8(v >> sh << uint(-b.nbit&7)) + v -= v >> sh << sh + nbit -= n + } +} + +// Num is the encoding for numeric data. +// The only valid characters are the decimal digits 0 through 9. +type Num string + +func (s Num) String() string { + return fmt.Sprintf("Num(%#q)", string(s)) +} + +func (s Num) Check() error { + for _, c := range s { + if c < '0' || '9' < c { + return fmt.Errorf("non-numeric string %#q", string(s)) + } + } + return nil +} + +var numLen = [3]int{10, 12, 14} + +func (s Num) Bits(v Version) int { + return 4 + numLen[v.sizeClass()] + (10*len(s)+2)/3 +} + +func (s Num) Encode(b *Bits, v Version) { + b.Write(1, 4) + b.Write(uint(len(s)), numLen[v.sizeClass()]) + var i int + for i = 0; i+3 <= len(s); i += 3 { + w := uint(s[i]-'0')*100 + uint(s[i+1]-'0')*10 + uint(s[i+2]-'0') + b.Write(w, 10) + } + switch len(s) - i { + case 1: + w := uint(s[i] - '0') + b.Write(w, 4) + case 2: + w := uint(s[i]-'0')*10 + uint(s[i+1]-'0') + b.Write(w, 7) + } +} + +// Alpha is the encoding for alphanumeric data. +// The valid characters are 0-9A-Z$%*+-./: and space. 
+type Alpha string + +const alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:" + +func (s Alpha) String() string { + return fmt.Sprintf("Alpha(%#q)", string(s)) +} + +func (s Alpha) Check() error { + for _, c := range s { + if strings.IndexRune(alphabet, c) < 0 { + return fmt.Errorf("non-alphanumeric string %#q", string(s)) + } + } + return nil +} + +var alphaLen = [3]int{9, 11, 13} + +func (s Alpha) Bits(v Version) int { + return 4 + alphaLen[v.sizeClass()] + (11*len(s)+1)/2 +} + +func (s Alpha) Encode(b *Bits, v Version) { + b.Write(2, 4) + b.Write(uint(len(s)), alphaLen[v.sizeClass()]) + var i int + for i = 0; i+2 <= len(s); i += 2 { + w := uint(strings.IndexRune(alphabet, rune(s[i])))*45 + + uint(strings.IndexRune(alphabet, rune(s[i+1]))) + b.Write(w, 11) + } + + if i < len(s) { + w := uint(strings.IndexRune(alphabet, rune(s[i]))) + b.Write(w, 6) + } +} + +// String is the encoding for 8-bit data. All bytes are valid. +type String string + +func (s String) String() string { + return fmt.Sprintf("String(%#q)", string(s)) +} + +func (s String) Check() error { + return nil +} + +var stringLen = [3]int{8, 16, 16} + +func (s String) Bits(v Version) int { + return 4 + stringLen[v.sizeClass()] + 8*len(s) +} + +func (s String) Encode(b *Bits, v Version) { + b.Write(4, 4) + b.Write(uint(len(s)), stringLen[v.sizeClass()]) + for i := 0; i < len(s); i++ { + b.Write(uint(s[i]), 8) + } +} + +// A Pixel describes a single pixel in a QR code. 
+type Pixel uint32 + +const ( + Black Pixel = 1 << iota + Invert +) + +func (p Pixel) Offset() uint { + return uint(p >> 6) +} + +func OffsetPixel(o uint) Pixel { + return Pixel(o << 6) +} + +func (r PixelRole) Pixel() Pixel { + return Pixel(r << 2) +} + +func (p Pixel) Role() PixelRole { + return PixelRole(p>>2) & 15 +} + +func (p Pixel) String() string { + s := p.Role().String() + if p&Black != 0 { + s += "+black" + } + if p&Invert != 0 { + s += "+invert" + } + s += "+" + strconv.FormatUint(uint64(p.Offset()), 10) + return s +} + +// A PixelRole describes the role of a QR pixel. +type PixelRole uint32 + +const ( + _ PixelRole = iota + Position // position squares (large) + Alignment // alignment squares (small) + Timing // timing strip between position squares + Format // format metadata + PVersion // version pattern + Unused // unused pixel + Data // data bit + Check // error correction check bit + Extra +) + +var roles = []string{ + "", + "position", + "alignment", + "timing", + "format", + "pversion", + "unused", + "data", + "check", + "extra", +} + +func (r PixelRole) String() string { + if Position <= r && r <= Check { + return roles[r] + } + return strconv.Itoa(int(r)) +} + +// A Level represents a QR error correction level. +// From least to most tolerant of errors, they are L, M, Q, H. +type Level int + +const ( + L Level = iota + M + Q + H +) + +func (l Level) String() string { + if L <= l && l <= H { + return "LMQH"[l : l+1] + } + return strconv.Itoa(int(l)) +} + +// A Code is a square pixel grid. 
+type Code struct { + Bitmap []byte // 1 is black, 0 is white + Size int // number of pixels on a side + Stride int // number of bytes per row +} + +func (c *Code) Black(x, y int) bool { + return 0 <= x && x < c.Size && 0 <= y && y < c.Size && + c.Bitmap[y*c.Stride+x/8]&(1<= pad { + break + } + b.Write(0x11, 8) + } + } +} + +func (b *Bits) AddCheckBytes(v Version, l Level) { + nd := v.DataBytes(l) + if b.nbit < nd*8 { + b.Pad(nd*8 - b.nbit) + } + if b.nbit != nd*8 { + panic("qr: too much data") + } + + dat := b.Bytes() + vt := &vtab[v] + lev := &vt.level[l] + db := nd / lev.nblock + extra := nd % lev.nblock + chk := make([]byte, lev.check) + rs := gf256.NewRSEncoder(Field, lev.check) + for i := 0; i < lev.nblock; i++ { + if i == lev.nblock-extra { + db++ + } + rs.ECC(dat[:db], chk) + b.Append(chk) + dat = dat[db:] + } + + if len(b.Bytes()) != vt.bytes { + panic("qr: internal error") + } +} + +func (p *Plan) Encode(text ...Encoding) (*Code, error) { + var b Bits + for _, t := range text { + if err := t.Check(); err != nil { + return nil, err + } + t.Encode(&b, p.Version) + } + if b.Bits() > p.DataBytes*8 { + return nil, fmt.Errorf("cannot encode %d bits into %d-bit code", b.Bits(), p.DataBytes*8) + } + b.AddCheckBytes(p.Version, p.Level) + bytes := b.Bytes() + + // Now we have the checksum bytes and the data bytes. + // Construct the actual code. + c := &Code{Size: len(p.Pixel), Stride: (len(p.Pixel) + 7) &^ 7} + c.Bitmap = make([]byte, c.Stride*c.Size) + crow := c.Bitmap + for _, row := range p.Pixel { + for x, pix := range row { + switch pix.Role() { + case Data, Check: + o := pix.Offset() + if bytes[o/8]&(1< 40 { + return nil, fmt.Errorf("invalid QR version %d", int(v)) + } + siz := 17 + int(v)*4 + m := grid(siz) + p.Pixel = m + + // Timing markers (overwritten by boxes). + const ti = 6 // timing is in row/column 6 (counting from 0) + for i := range m { + p := Timing.Pixel() + if i&1 == 0 { + p |= Black + } + m[i][ti] = p + m[ti][i] = p + } + + // Position boxes. 
+ posBox(m, 0, 0) + posBox(m, siz-7, 0) + posBox(m, 0, siz-7) + + // Alignment boxes. + info := &vtab[v] + for x := 4; x+5 < siz; { + for y := 4; y+5 < siz; { + // don't overwrite timing markers + if (x < 7 && y < 7) || (x < 7 && y+5 >= siz-7) || (x+5 >= siz-7 && y < 7) { + } else { + alignBox(m, x, y) + } + if y == 4 { + y = info.apos + } else { + y += info.astride + } + } + if x == 4 { + x = info.apos + } else { + x += info.astride + } + } + + // Version pattern. + pat := vtab[v].pattern + if pat != 0 { + v := pat + for x := 0; x < 6; x++ { + for y := 0; y < 3; y++ { + p := PVersion.Pixel() + if v&1 != 0 { + p |= Black + } + m[siz-11+y][x] = p + m[x][siz-11+y] = p + v >>= 1 + } + } + } + + // One lonely black pixel + m[siz-8][8] = Unused.Pixel() | Black + + return p, nil +} + +// fplan adds the format pixels +func fplan(l Level, m Mask, p *Plan) error { + // Format pixels. + fb := uint32(l^1) << 13 // level: L=01, M=00, Q=11, H=10 + fb |= uint32(m) << 10 // mask + const formatPoly = 0x537 + rem := fb + for i := 14; i >= 10; i-- { + if rem&(1<>i)&1 == 1 { + pix |= Black + } + if (invert>>i)&1 == 1 { + pix ^= Invert | Black + } + // top left + switch { + case i < 6: + p.Pixel[i][8] = pix + case i < 8: + p.Pixel[i+1][8] = pix + case i < 9: + p.Pixel[8][7] = pix + default: + p.Pixel[8][14-i] = pix + } + // bottom right + switch { + case i < 8: + p.Pixel[8][siz-1-int(i)] = pix + default: + p.Pixel[siz-1-int(14-i)][8] = pix + } + } + return nil +} + +// lplan edits a version-only Plan to add information +// about the error correction levels. +func lplan(v Version, l Level, p *Plan) error { + p.Level = l + + nblock := vtab[v].level[l].nblock + ne := vtab[v].level[l].check + nde := (vtab[v].bytes - ne*nblock) / nblock + extra := (vtab[v].bytes - ne*nblock) % nblock + dataBits := (nde*nblock + extra) * 8 + checkBits := ne * nblock * 8 + + p.DataBytes = vtab[v].bytes - ne*nblock + p.CheckBytes = ne * nblock + p.Blocks = nblock + + // Make data + checksum pixels. 
+ data := make([]Pixel, dataBits) + for i := range data { + data[i] = Data.Pixel() | OffsetPixel(uint(i)) + } + check := make([]Pixel, checkBits) + for i := range check { + check[i] = Check.Pixel() | OffsetPixel(uint(i+dataBits)) + } + + // Split into blocks. + dataList := make([][]Pixel, nblock) + checkList := make([][]Pixel, nblock) + for i := 0; i < nblock; i++ { + // The last few blocks have an extra data byte (8 pixels). + nd := nde + if i >= nblock-extra { + nd++ + } + dataList[i], data = data[0:nd*8], data[nd*8:] + checkList[i], check = check[0:ne*8], check[ne*8:] + } + if len(data) != 0 || len(check) != 0 { + panic("data/check math") + } + + // Build up bit sequence, taking first byte of each block, + // then second byte, and so on. Then checksums. + bits := make([]Pixel, dataBits+checkBits) + dst := bits + for i := 0; i < nde+1; i++ { + for _, b := range dataList { + if i*8 < len(b) { + copy(dst, b[i*8:(i+1)*8]) + dst = dst[8:] + } + } + } + for i := 0; i < ne; i++ { + for _, b := range checkList { + if i*8 < len(b) { + copy(dst, b[i*8:(i+1)*8]) + dst = dst[8:] + } + } + } + if len(dst) != 0 { + panic("dst math") + } + + // Sweep up pair of columns, + // then down, assigning to right then left pixel. + // Repeat. + // See Figure 2 of http://www.pclviewer.com/rs2/qrtopology.htm + siz := len(p.Pixel) + rem := make([]Pixel, 7) + for i := range rem { + rem[i] = Extra.Pixel() + } + src := append(bits, rem...) 
+ for x := siz; x > 0; { + for y := siz - 1; y >= 0; y-- { + if p.Pixel[y][x-1].Role() == 0 { + p.Pixel[y][x-1], src = src[0], src[1:] + } + if p.Pixel[y][x-2].Role() == 0 { + p.Pixel[y][x-2], src = src[0], src[1:] + } + } + x -= 2 + if x == 7 { // vertical timing strip + x-- + } + for y := 0; y < siz; y++ { + if p.Pixel[y][x-1].Role() == 0 { + p.Pixel[y][x-1], src = src[0], src[1:] + } + if p.Pixel[y][x-2].Role() == 0 { + p.Pixel[y][x-2], src = src[0], src[1:] + } + } + x -= 2 + } + return nil +} + +// mplan edits a version+level-only Plan to add the mask. +func mplan(m Mask, p *Plan) error { + p.Mask = m + for y, row := range p.Pixel { + for x, pix := range row { + if r := pix.Role(); (r == Data || r == Check || r == Extra) && p.Mask.Invert(y, x) { + row[x] ^= Black | Invert + } + } + } + return nil +} + +// posBox draws a position (large) box at upper left x, y. +func posBox(m [][]Pixel, x, y int) { + pos := Position.Pixel() + // box + for dy := 0; dy < 7; dy++ { + for dx := 0; dx < 7; dx++ { + p := pos + if dx == 0 || dx == 6 || dy == 0 || dy == 6 || 2 <= dx && dx <= 4 && 2 <= dy && dy <= 4 { + p |= Black + } + m[y+dy][x+dx] = p + } + } + // white border + for dy := -1; dy < 8; dy++ { + if 0 <= y+dy && y+dy < len(m) { + if x > 0 { + m[y+dy][x-1] = pos + } + if x+7 < len(m) { + m[y+dy][x+7] = pos + } + } + } + for dx := -1; dx < 8; dx++ { + if 0 <= x+dx && x+dx < len(m) { + if y > 0 { + m[y-1][x+dx] = pos + } + if y+7 < len(m) { + m[y+7][x+dx] = pos + } + } + } +} + +// alignBox draw an alignment (small) box at upper left x, y. 
+func alignBox(m [][]Pixel, x, y int) { + // box + align := Alignment.Pixel() + for dy := 0; dy < 5; dy++ { + for dx := 0; dx < 5; dx++ { + p := align + if dx == 0 || dx == 4 || dy == 0 || dy == 4 || dx == 2 && dy == 2 { + p |= Black + } + m[y+dy][x+dx] = p + } + } +} diff --git a/vendor/github.com/mattermost/rsc/qr/png.go b/vendor/github.com/mattermost/rsc/qr/png.go new file mode 100644 index 00000000..db49d057 --- /dev/null +++ b/vendor/github.com/mattermost/rsc/qr/png.go @@ -0,0 +1,400 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package qr + +// PNG writer for QR codes. + +import ( + "bytes" + "encoding/binary" + "hash" + "hash/crc32" +) + +// PNG returns a PNG image displaying the code. +// +// PNG uses a custom encoder tailored to QR codes. +// Its compressed size is about 2x away from optimal, +// but it runs about 20x faster than calling png.Encode +// on c.Image(). 
+func (c *Code) PNG() []byte { + var p pngWriter + return p.encode(c) +} + +type pngWriter struct { + tmp [16]byte + wctmp [4]byte + buf bytes.Buffer + zlib bitWriter + crc hash.Hash32 +} + +var pngHeader = []byte("\x89PNG\r\n\x1a\n") + +func (w *pngWriter) encode(c *Code) []byte { + scale := c.Scale + siz := c.Size + + w.buf.Reset() + + // Header + w.buf.Write(pngHeader) + + // Header block + binary.BigEndian.PutUint32(w.tmp[0:4], uint32((siz+8)*scale)) + binary.BigEndian.PutUint32(w.tmp[4:8], uint32((siz+8)*scale)) + w.tmp[8] = 1 // 1-bit + w.tmp[9] = 0 // gray + w.tmp[10] = 0 + w.tmp[11] = 0 + w.tmp[12] = 0 + w.writeChunk("IHDR", w.tmp[:13]) + + // Comment + w.writeChunk("tEXt", comment) + + // Data + w.zlib.writeCode(c) + w.writeChunk("IDAT", w.zlib.bytes.Bytes()) + + // End + w.writeChunk("IEND", nil) + + return w.buf.Bytes() +} + +var comment = []byte("Software\x00QR-PNG http://qr.swtch.com/") + +func (w *pngWriter) writeChunk(name string, data []byte) { + if w.crc == nil { + w.crc = crc32.NewIEEE() + } + binary.BigEndian.PutUint32(w.wctmp[0:4], uint32(len(data))) + w.buf.Write(w.wctmp[0:4]) + w.crc.Reset() + copy(w.wctmp[0:4], name) + w.buf.Write(w.wctmp[0:4]) + w.crc.Write(w.wctmp[0:4]) + w.buf.Write(data) + w.crc.Write(data) + crc := w.crc.Sum32() + binary.BigEndian.PutUint32(w.wctmp[0:4], crc) + w.buf.Write(w.wctmp[0:4]) +} + +func (b *bitWriter) writeCode(c *Code) { + const ftNone = 0 + + b.adler32.Reset() + b.bytes.Reset() + b.nbit = 0 + + scale := c.Scale + siz := c.Size + + // zlib header + b.tmp[0] = 0x78 + b.tmp[1] = 0 + b.tmp[1] += uint8(31 - (uint16(b.tmp[0])<<8+uint16(b.tmp[1]))%31) + b.bytes.Write(b.tmp[0:2]) + + // Start flate block. + b.writeBits(1, 1, false) // final block + b.writeBits(1, 2, false) // compressed, fixed Huffman tables + + // White border. + // First row. + b.byte(ftNone) + n := (scale*(siz+8) + 7) / 8 + b.byte(255) + b.repeat(n-1, 1) + // 4*scale rows total. 
+ b.repeat((4*scale-1)*(1+n), 1+n) + + for i := 0; i < 4*scale; i++ { + b.adler32.WriteNByte(ftNone, 1) + b.adler32.WriteNByte(255, n) + } + + row := make([]byte, 1+n) + for y := 0; y < siz; y++ { + row[0] = ftNone + j := 1 + var z uint8 + nz := 0 + for x := -4; x < siz+4; x++ { + // Raw data. + for i := 0; i < scale; i++ { + z <<= 1 + if !c.Black(x, y) { + z |= 1 + } + if nz++; nz == 8 { + row[j] = z + j++ + nz = 0 + } + } + } + if j < len(row) { + row[j] = z + } + for _, z := range row { + b.byte(z) + } + + // Scale-1 copies. + b.repeat((scale-1)*(1+n), 1+n) + + b.adler32.WriteN(row, scale) + } + + // White border. + // First row. + b.byte(ftNone) + b.byte(255) + b.repeat(n-1, 1) + // 4*scale rows total. + b.repeat((4*scale-1)*(1+n), 1+n) + + for i := 0; i < 4*scale; i++ { + b.adler32.WriteNByte(ftNone, 1) + b.adler32.WriteNByte(255, n) + } + + // End of block. + b.hcode(256) + b.flushBits() + + // adler32 + binary.BigEndian.PutUint32(b.tmp[0:], b.adler32.Sum32()) + b.bytes.Write(b.tmp[0:4]) +} + +// A bitWriter is a write buffer for bit-oriented data like deflate. 
+type bitWriter struct { + bytes bytes.Buffer + bit uint32 + nbit uint + + tmp [4]byte + adler32 adigest +} + +func (b *bitWriter) writeBits(bit uint32, nbit uint, rev bool) { + // reverse, for huffman codes + if rev { + br := uint32(0) + for i := uint(0); i < nbit; i++ { + br |= ((bit >> i) & 1) << (nbit - 1 - i) + } + bit = br + } + b.bit |= bit << b.nbit + b.nbit += nbit + for b.nbit >= 8 { + b.bytes.WriteByte(byte(b.bit)) + b.bit >>= 8 + b.nbit -= 8 + } +} + +func (b *bitWriter) flushBits() { + if b.nbit > 0 { + b.bytes.WriteByte(byte(b.bit)) + b.nbit = 0 + b.bit = 0 + } +} + +func (b *bitWriter) hcode(v int) { + /* + Lit Value Bits Codes + --------- ---- ----- + 0 - 143 8 00110000 through + 10111111 + 144 - 255 9 110010000 through + 111111111 + 256 - 279 7 0000000 through + 0010111 + 280 - 287 8 11000000 through + 11000111 + */ + switch { + case v <= 143: + b.writeBits(uint32(v)+0x30, 8, true) + case v <= 255: + b.writeBits(uint32(v-144)+0x190, 9, true) + case v <= 279: + b.writeBits(uint32(v-256)+0, 7, true) + case v <= 287: + b.writeBits(uint32(v-280)+0xc0, 8, true) + default: + panic("invalid hcode") + } +} + +func (b *bitWriter) byte(x byte) { + b.hcode(int(x)) +} + +func (b *bitWriter) codex(c int, val int, nx uint) { + b.hcode(c + val>>nx) + b.writeBits(uint32(val)&(1<= 258+3; n -= 258 { + b.repeat1(258, d) + } + if n > 258 { + // 258 < n < 258+3 + b.repeat1(10, d) + b.repeat1(n-10, d) + return + } + if n < 3 { + panic("invalid flate repeat") + } + b.repeat1(n, d) +} + +func (b *bitWriter) repeat1(n, d int) { + /* + Extra Extra Extra + Code Bits Length(s) Code Bits Lengths Code Bits Length(s) + ---- ---- ------ ---- ---- ------- ---- ---- ------- + 257 0 3 267 1 15,16 277 4 67-82 + 258 0 4 268 1 17,18 278 4 83-98 + 259 0 5 269 2 19-22 279 4 99-114 + 260 0 6 270 2 23-26 280 4 115-130 + 261 0 7 271 2 27-30 281 5 131-162 + 262 0 8 272 2 31-34 282 5 163-194 + 263 0 9 273 3 35-42 283 5 195-226 + 264 0 10 274 3 43-50 284 5 227-257 + 265 1 11,12 275 3 51-58 285 
0 258 + 266 1 13,14 276 3 59-66 + */ + switch { + case n <= 10: + b.codex(257, n-3, 0) + case n <= 18: + b.codex(265, n-11, 1) + case n <= 34: + b.codex(269, n-19, 2) + case n <= 66: + b.codex(273, n-35, 3) + case n <= 130: + b.codex(277, n-67, 4) + case n <= 257: + b.codex(281, n-131, 5) + case n == 258: + b.hcode(285) + default: + panic("invalid repeat length") + } + + /* + Extra Extra Extra + Code Bits Dist Code Bits Dist Code Bits Distance + ---- ---- ---- ---- ---- ------ ---- ---- -------- + 0 0 1 10 4 33-48 20 9 1025-1536 + 1 0 2 11 4 49-64 21 9 1537-2048 + 2 0 3 12 5 65-96 22 10 2049-3072 + 3 0 4 13 5 97-128 23 10 3073-4096 + 4 1 5,6 14 6 129-192 24 11 4097-6144 + 5 1 7,8 15 6 193-256 25 11 6145-8192 + 6 2 9-12 16 7 257-384 26 12 8193-12288 + 7 2 13-16 17 7 385-512 27 12 12289-16384 + 8 3 17-24 18 8 513-768 28 13 16385-24576 + 9 3 25-32 19 8 769-1024 29 13 24577-32768 + */ + if d <= 4 { + b.writeBits(uint32(d-1), 5, true) + } else if d <= 32768 { + nbit := uint(16) + for d <= 1<<(nbit-1) { + nbit-- + } + v := uint32(d - 1) + v &^= 1 << (nbit - 1) // top bit is implicit + code := uint32(2*nbit - 2) // second bit is low bit of code + code |= v >> (nbit - 2) + v &^= 1 << (nbit - 2) + b.writeBits(code, 5, true) + // rest of bits follow + b.writeBits(uint32(v), nbit-2, false) + } else { + panic("invalid repeat distance") + } +} + +func (b *bitWriter) run(v byte, n int) { + if n == 0 { + return + } + b.byte(v) + if n-1 < 3 { + for i := 0; i < n-1; i++ { + b.byte(v) + } + } else { + b.repeat(n-1, 1) + } +} + +type adigest struct { + a, b uint32 +} + +func (d *adigest) Reset() { d.a, d.b = 1, 0 } + +const amod = 65521 + +func aupdate(a, b uint32, pi byte, n int) (aa, bb uint32) { + // TODO(rsc): 6g doesn't do magic multiplies for b %= amod, + // only for b = b%amod. 
+ + // invariant: a, b < amod + if pi == 0 { + b += uint32(n%amod) * a + b = b % amod + return a, b + } + + // n times: + // a += pi + // b += a + // is same as + // b += n*a + n*(n+1)/2*pi + // a += n*pi + m := uint32(n) + b += (m % amod) * a + b = b % amod + b += (m * (m + 1) / 2) % amod * uint32(pi) + b = b % amod + a += (m % amod) * uint32(pi) + a = a % amod + return a, b +} + +func afinish(a, b uint32) uint32 { + return b<<16 | a +} + +func (d *adigest) WriteN(p []byte, n int) { + for i := 0; i < n; i++ { + for _, pi := range p { + d.a, d.b = aupdate(d.a, d.b, pi, 1) + } + } +} + +func (d *adigest) WriteNByte(pi byte, n int) { + d.a, d.b = aupdate(d.a, d.b, pi, n) +} + +func (d *adigest) Sum32() uint32 { return afinish(d.a, d.b) } diff --git a/vendor/github.com/mattermost/rsc/qr/qr.go b/vendor/github.com/mattermost/rsc/qr/qr.go new file mode 100644 index 00000000..1d20d02f --- /dev/null +++ b/vendor/github.com/mattermost/rsc/qr/qr.go @@ -0,0 +1,116 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package qr encodes QR codes. +*/ +package qr + +import ( + "errors" + "image" + "image/color" + + "github.com/mattermost/rsc/qr/coding" +) + +// A Level denotes a QR error correction level. +// From least to most tolerant of errors, they are L, M, Q, H. +type Level int + +const ( + L Level = iota // 20% redundant + M // 38% redundant + Q // 55% redundant + H // 65% redundant +) + +// Encode returns an encoding of text at the given error correction level. +func Encode(text string, level Level) (*Code, error) { + // Pick data encoding, smallest first. + // We could split the string and use different encodings + // but that seems like overkill for now. 
+ var enc coding.Encoding + switch { + case coding.Num(text).Check() == nil: + enc = coding.Num(text) + case coding.Alpha(text).Check() == nil: + enc = coding.Alpha(text) + default: + enc = coding.String(text) + } + + // Pick size. + l := coding.Level(level) + var v coding.Version + for v = coding.MinVersion; ; v++ { + if v > coding.MaxVersion { + return nil, errors.New("text too long to encode as QR") + } + if enc.Bits(v) <= v.DataBytes(l)*8 { + break + } + } + + // Build and execute plan. + p, err := coding.NewPlan(v, l, 0) + if err != nil { + return nil, err + } + cc, err := p.Encode(enc) + if err != nil { + return nil, err + } + + // TODO: Pick appropriate mask. + + return &Code{cc.Bitmap, cc.Size, cc.Stride, 8}, nil +} + +// A Code is a square pixel grid. +// It implements image.Image and direct PNG encoding. +type Code struct { + Bitmap []byte // 1 is black, 0 is white + Size int // number of pixels on a side + Stride int // number of bytes per row + Scale int // number of image pixels per QR pixel +} + +// Black returns true if the pixel at (x,y) is black. +func (c *Code) Black(x, y int) bool { + return 0 <= x && x < c.Size && 0 <= y && y < c.Size && + c.Bitmap[y*c.Stride+x/8]&(1< 0 { + users = users.Where("name LIKE ?", fmt.Sprint("%", q, "%")) +} +``` + +Squirrel wants to make your life easier: + +```go +// StmtCache caches Prepared Stmts for you +dbCache := sq.NewStmtCache(db) + +// StatementBuilder keeps your syntax neat +mydb := sq.StatementBuilder.RunWith(dbCache) +select_users := mydb.Select("*").From("users") +``` + +Squirrel loves PostgreSQL: + +```go +psql := sq.StatementBuilder.PlaceholderFormat(sq.Dollar) + +// You use question marks for placeholders... +sql, _, _ := psql.Select("*").From("elephants").Where("name IN (?,?)", "Dumbo", "Verna").ToSql() + +/// ...squirrel replaces them using PlaceholderFormat. +sql == "SELECT * FROM elephants WHERE name IN ($1,$2)" + + +/// You can retrieve id ... +query := sq.Insert("nodes"). 
+ Columns("uuid", "type", "data"). + Values(node.Uuid, node.Type, node.Data). + Suffix("RETURNING \"id\""). + RunWith(m.db). + PlaceholderFormat(sq.Dollar) + +query.QueryRow().Scan(&node.id) +``` + +You can escape question marks by inserting two question marks: + +```sql +SELECT * FROM nodes WHERE meta->'format' ??| array[?,?] +``` + +will generate with the Dollar Placeholder: + +```sql +SELECT * FROM nodes WHERE meta->'format' ?| array[$1,$2] +``` + +## FAQ + +* **How can I build an IN query on composite keys / tuples, e.g. `WHERE (col1, col2) IN ((1,2),(3,4))`? ([#104](https://github.com/Masterminds/squirrel/issues/104))** + + Squirrel does not explicitly support tuples, but you can get the same effect with e.g.: + + ```go + sq.Or{ + sq.Eq{"col1": 1, "col2": 2}, + sq.Eq{"col1": 3, "col2": 4}} + ``` + + ```sql + WHERE (col1 = 1 AND col2 = 2) OR (col1 = 3 AND col2 = 4) + ``` + + (which should produce the same query plan as the tuple version) + +* **Why doesn't `Eq{"mynumber": []uint8{1,2,3}}` turn into an `IN` query? ([#114](https://github.com/Masterminds/squirrel/issues/114))** + + Values of type `[]byte` are handled specially by `database/sql`. In Go, [`byte` is just an alias of `uint8`](https://golang.org/pkg/builtin/#byte), so there is no way to distinguish `[]uint8` from `[]byte`. + +* **Some features are poorly documented!** + + This isn't a frequent complaints section! + +* **Some features are poorly documented?** + + Yes. The tests should be considered a part of the documentation; take a look at those for ideas on how to express more complex queries. + +## License + +Squirrel is released under the +[MIT License](http://www.opensource.org/licenses/MIT). 
diff --git a/vendor/github.com/mattermost/squirrel/case.go b/vendor/github.com/mattermost/squirrel/case.go new file mode 100644 index 00000000..299e14b9 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/case.go @@ -0,0 +1,128 @@ +package squirrel + +import ( + "bytes" + "errors" + + "github.com/lann/builder" +) + +func init() { + builder.Register(CaseBuilder{}, caseData{}) +} + +// sqlizerBuffer is a helper that allows to write many Sqlizers one by one +// without constant checks for errors that may come from Sqlizer +type sqlizerBuffer struct { + bytes.Buffer + args []interface{} + err error +} + +// WriteSql converts Sqlizer to SQL strings and writes it to buffer +func (b *sqlizerBuffer) WriteSql(item Sqlizer) { + if b.err != nil { + return + } + + var str string + var args []interface{} + str, args, b.err = nestedToSql(item) + + if b.err != nil { + return + } + + b.WriteString(str) + b.WriteByte(' ') + b.args = append(b.args, args...) +} + +func (b *sqlizerBuffer) ToSql() (string, []interface{}, error) { + return b.String(), b.args, b.err +} + +// whenPart is a helper structure to describe SQLs "WHEN ... THEN ..." 
expression +type whenPart struct { + when Sqlizer + then Sqlizer +} + +func newWhenPart(when interface{}, then interface{}) whenPart { + return whenPart{newPart(when), newPart(then)} +} + +// caseData holds all the data required to build a CASE SQL construct +type caseData struct { + What Sqlizer + WhenParts []whenPart + Else Sqlizer +} + +// ToSql implements Sqlizer +func (d *caseData) ToSql() (sqlStr string, args []interface{}, err error) { + if len(d.WhenParts) == 0 { + err = errors.New("case expression must contain at lease one WHEN clause") + + return + } + + sql := sqlizerBuffer{} + + sql.WriteString("CASE ") + if d.What != nil { + sql.WriteSql(d.What) + } + + for _, p := range d.WhenParts { + sql.WriteString("WHEN ") + sql.WriteSql(p.when) + sql.WriteString("THEN ") + sql.WriteSql(p.then) + } + + if d.Else != nil { + sql.WriteString("ELSE ") + sql.WriteSql(d.Else) + } + + sql.WriteString("END") + + return sql.ToSql() +} + +// CaseBuilder builds SQL CASE construct which could be used as parts of queries. +type CaseBuilder builder.Builder + +// ToSql builds the query into a SQL string and bound args. +func (b CaseBuilder) ToSql() (string, []interface{}, error) { + data := builder.GetStruct(b).(caseData) + return data.ToSql() +} + +// MustSql builds the query into a SQL string and bound args. +// It panics if there are any errors. +func (b CaseBuilder) MustSql() (string, []interface{}) { + sql, args, err := b.ToSql() + if err != nil { + panic(err) + } + return sql, args +} + +// what sets optional value for CASE construct "CASE [value] ..." +func (b CaseBuilder) what(expr interface{}) CaseBuilder { + return builder.Set(b, "What", newPart(expr)).(CaseBuilder) +} + +// When adds "WHEN ... THEN ..." 
part to CASE construct +func (b CaseBuilder) When(when interface{}, then interface{}) CaseBuilder { + // TODO: performance hint: replace slice of WhenPart with just slice of parts + // where even indices of the slice belong to "when"s and odd indices belong to "then"s + return builder.Append(b, "WhenParts", newWhenPart(when, then)).(CaseBuilder) +} + +// What sets optional "ELSE ..." part for CASE construct +func (b CaseBuilder) Else(expr interface{}) CaseBuilder { + return builder.Set(b, "Else", newPart(expr)).(CaseBuilder) +} diff --git a/vendor/github.com/mattermost/squirrel/delete.go b/vendor/github.com/mattermost/squirrel/delete.go new file mode 100644 index 00000000..8b0dc37c --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/delete.go @@ -0,0 +1,255 @@ +package squirrel + +import ( + "bytes" + "database/sql" + "fmt" + "strings" + + "github.com/lann/builder" +) + +type deleteData struct { + PlaceholderFormat PlaceholderFormat + RunWith BaseRunner + Prefixes []Sqlizer + What []string + From string + Joins []Sqlizer + Usings []string + WhereParts []Sqlizer + OrderBys []string + Limit string + Offset string + Suffixes []Sqlizer +} + +func (d *deleteData) Exec() (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return ExecWith(d.RunWith, d) +} + +func (d *deleteData) ToSql() (sqlStr string, args []interface{}, err error) { + if len(d.From) == 0 { + err = fmt.Errorf("delete statements must specify a From table") + return + } + + sql := &bytes.Buffer{} + + if len(d.Prefixes) > 0 { + args, err = appendToSql(d.Prefixes, sql, " ", args) + if err != nil { + return + } + + sql.WriteString(" ") + } + + sql.WriteString("DELETE") + + if len(d.What) > 0 { + sql.WriteString(" ") + sql.WriteString(strings.Join(d.What, ", ")) + } + + sql.WriteString(" FROM ") + sql.WriteString(d.From) + + if len(d.Usings) > 0 { + sql.WriteString(" USING ") + sql.WriteString(strings.Join(d.Usings, ", ")) + } + + if len(d.Joins) > 0 { + sql.WriteString(" ") + 
args, err = appendToSql(d.Joins, sql, " ", args) + if err != nil { + return + } + } + + if len(d.WhereParts) > 0 { + sql.WriteString(" WHERE ") + args, err = appendToSql(d.WhereParts, sql, " AND ", args) + if err != nil { + return + } + } + + if len(d.OrderBys) > 0 { + sql.WriteString(" ORDER BY ") + sql.WriteString(strings.Join(d.OrderBys, ", ")) + } + + if len(d.Limit) > 0 { + sql.WriteString(" LIMIT ") + sql.WriteString(d.Limit) + } + + if len(d.Offset) > 0 { + sql.WriteString(" OFFSET ") + sql.WriteString(d.Offset) + } + + if len(d.Suffixes) > 0 { + sql.WriteString(" ") + args, err = appendToSql(d.Suffixes, sql, " ", args) + if err != nil { + return + } + } + + sqlStr, err = d.PlaceholderFormat.ReplacePlaceholders(sql.String()) + return +} + +// Builder + +// DeleteBuilder builds SQL DELETE statements. +type DeleteBuilder builder.Builder + +func init() { + builder.Register(DeleteBuilder{}, deleteData{}) +} + +// Format methods + +// PlaceholderFormat sets PlaceholderFormat (e.g. Question or Dollar) for the +// query. +func (b DeleteBuilder) PlaceholderFormat(f PlaceholderFormat) DeleteBuilder { + return builder.Set(b, "PlaceholderFormat", f).(DeleteBuilder) +} + +// Runner methods + +// RunWith sets a Runner (like database/sql.DB) to be used with e.g. Exec. +func (b DeleteBuilder) RunWith(runner BaseRunner) DeleteBuilder { + return setRunWith(b, runner).(DeleteBuilder) +} + +// Exec builds and Execs the query with the Runner set by RunWith. +func (b DeleteBuilder) Exec() (sql.Result, error) { + data := builder.GetStruct(b).(deleteData) + return data.Exec() +} + +// SQL methods + +// ToSql builds the query into a SQL string and bound args. +func (b DeleteBuilder) ToSql() (string, []interface{}, error) { + data := builder.GetStruct(b).(deleteData) + return data.ToSql() +} + +// MustSql builds the query into a SQL string and bound args. +// It panics if there are any errors. 
+func (b DeleteBuilder) MustSql() (string, []interface{}) { + sql, args, err := b.ToSql() + if err != nil { + panic(err) + } + return sql, args +} + +// Prefix adds an expression to the beginning of the query +func (b DeleteBuilder) Prefix(sql string, args ...interface{}) DeleteBuilder { + return b.PrefixExpr(Expr(sql, args...)) +} + +// PrefixExpr adds an expression to the very beginning of the query +func (b DeleteBuilder) PrefixExpr(expr Sqlizer) DeleteBuilder { + return builder.Append(b, "Prefixes", expr).(DeleteBuilder) +} + +// What injects something between DELETE and FROM in the query, allowing the MySQL specific +// multi-table syntax, itself often useful with joins. +func (b DeleteBuilder) What(what ...string) DeleteBuilder { + return builder.Extend(b, "What", what).(DeleteBuilder) +} + +// From sets the table to be deleted from. +func (b DeleteBuilder) From(from string) DeleteBuilder { + return builder.Set(b, "From", from).(DeleteBuilder) +} + +// Using adds USING expressions to the query. +func (b DeleteBuilder) Using(usings ...string) DeleteBuilder { + return builder.Extend(b, "Usings", usings).(DeleteBuilder) +} + +// JoinClause adds a join clause to the query. +func (b DeleteBuilder) JoinClause(pred interface{}, args ...interface{}) DeleteBuilder { + return builder.Append(b, "Joins", newPart(pred, args...)).(DeleteBuilder) +} + +// Join adds a JOIN clause to the query. +func (b DeleteBuilder) Join(join string, rest ...interface{}) DeleteBuilder { + return b.JoinClause("JOIN "+join, rest...) +} + +// LeftJoin adds a LEFT JOIN clause to the query. +func (b DeleteBuilder) LeftJoin(join string, rest ...interface{}) DeleteBuilder { + return b.JoinClause("LEFT JOIN "+join, rest...) +} + +// RightJoin adds a RIGHT JOIN clause to the query. +func (b DeleteBuilder) RightJoin(join string, rest ...interface{}) DeleteBuilder { + return b.JoinClause("RIGHT JOIN "+join, rest...) +} + +// InnerJoin adds a INNER JOIN clause to the query. 
+func (b DeleteBuilder) InnerJoin(join string, rest ...interface{}) DeleteBuilder { + return b.JoinClause("INNER JOIN "+join, rest...) +} + +// CrossJoin adds a CROSS JOIN clause to the query. +func (b DeleteBuilder) CrossJoin(join string, rest ...interface{}) DeleteBuilder { + return b.JoinClause("CROSS JOIN "+join, rest...) +} + +// Where adds WHERE expressions to the query. +// +// See SelectBuilder.Where for more information. +func (b DeleteBuilder) Where(pred interface{}, args ...interface{}) DeleteBuilder { + return builder.Append(b, "WhereParts", newWherePart(pred, args...)).(DeleteBuilder) +} + +// OrderBy adds ORDER BY expressions to the query. +func (b DeleteBuilder) OrderBy(orderBys ...string) DeleteBuilder { + return builder.Extend(b, "OrderBys", orderBys).(DeleteBuilder) +} + +// Limit sets a LIMIT clause on the query. +func (b DeleteBuilder) Limit(limit uint64) DeleteBuilder { + return builder.Set(b, "Limit", fmt.Sprintf("%d", limit)).(DeleteBuilder) +} + +// Offset sets a OFFSET clause on the query. 
+func (b DeleteBuilder) Offset(offset uint64) DeleteBuilder { + return builder.Set(b, "Offset", fmt.Sprintf("%d", offset)).(DeleteBuilder) +} + +// Suffix adds an expression to the end of the query +func (b DeleteBuilder) Suffix(sql string, args ...interface{}) DeleteBuilder { + return b.SuffixExpr(Expr(sql, args...)) +} + +// SuffixExpr adds an expression to the end of the query +func (b DeleteBuilder) SuffixExpr(expr Sqlizer) DeleteBuilder { + return builder.Append(b, "Suffixes", expr).(DeleteBuilder) +} + +func (b DeleteBuilder) Query() (*sql.Rows, error) { + data := builder.GetStruct(b).(deleteData) + return data.Query() +} + +func (d *deleteData) Query() (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return QueryWith(d.RunWith, d) +} diff --git a/vendor/github.com/mattermost/squirrel/delete_ctx.go b/vendor/github.com/mattermost/squirrel/delete_ctx.go new file mode 100644 index 00000000..de83c55d --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/delete_ctx.go @@ -0,0 +1,69 @@ +// +build go1.8 + +package squirrel + +import ( + "context" + "database/sql" + + "github.com/lann/builder" +) + +func (d *deleteData) ExecContext(ctx context.Context) (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(ExecerContext) + if !ok { + return nil, NoContextSupport + } + return ExecContextWith(ctx, ctxRunner, d) +} + +func (d *deleteData) QueryContext(ctx context.Context) (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(QueryerContext) + if !ok { + return nil, NoContextSupport + } + return QueryContextWith(ctx, ctxRunner, d) +} + +func (d *deleteData) QueryRowContext(ctx context.Context) RowScanner { + if d.RunWith == nil { + return &Row{err: RunnerNotSet} + } + queryRower, ok := d.RunWith.(QueryRowerContext) + if !ok { + if _, ok := d.RunWith.(QueryerContext); !ok { + return &Row{err: RunnerNotQueryRunner} + } + return &Row{err: 
NoContextSupport} + } + return QueryRowContextWith(ctx, queryRower, d) +} + +// ExecContext builds and ExecContexts the query with the Runner set by RunWith. +func (b DeleteBuilder) ExecContext(ctx context.Context) (sql.Result, error) { + data := builder.GetStruct(b).(deleteData) + return data.ExecContext(ctx) +} + +// QueryContext builds and QueryContexts the query with the Runner set by RunWith. +func (b DeleteBuilder) QueryContext(ctx context.Context) (*sql.Rows, error) { + data := builder.GetStruct(b).(deleteData) + return data.QueryContext(ctx) +} + +// QueryRowContext builds and QueryRowContexts the query with the Runner set by RunWith. +func (b DeleteBuilder) QueryRowContext(ctx context.Context) RowScanner { + data := builder.GetStruct(b).(deleteData) + return data.QueryRowContext(ctx) +} + +// ScanContext is a shortcut for QueryRowContext().Scan. +func (b DeleteBuilder) ScanContext(ctx context.Context, dest ...interface{}) error { + return b.QueryRowContext(ctx).Scan(dest...) +} diff --git a/vendor/github.com/mattermost/squirrel/expr.go b/vendor/github.com/mattermost/squirrel/expr.go new file mode 100644 index 00000000..576c66a7 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/expr.go @@ -0,0 +1,434 @@ +package squirrel + +import ( + "bytes" + "database/sql/driver" + "fmt" + "reflect" + "sort" + "strings" +) + +const ( + // Portable true/false literals. + sqlTrue = "(1=1)" + sqlFalse = "(1=0)" +) + +type expr struct { + sql string + args []interface{} +} + +// Expr builds an expression from a SQL fragment and arguments. 
+// +// Ex: +// Expr("FROM_UNIXTIME(?)", t) +func Expr(sql string, args ...interface{}) Sqlizer { + return expr{sql: sql, args: args} +} + +func (e expr) ToSql() (sql string, args []interface{}, err error) { + simple := true + for _, arg := range e.args { + if _, ok := arg.(Sqlizer); ok { + simple = false + } + } + if simple { + return e.sql, e.args, nil + } + + buf := &bytes.Buffer{} + ap := e.args + sp := e.sql + + var isql string + var iargs []interface{} + + for err == nil && len(ap) > 0 && len(sp) > 0 { + i := strings.Index(sp, "?") + if i < 0 { + // no more placeholders + break + } + if len(sp) > i+1 && sp[i+1:i+2] == "?" { + // escaped "??"; append it and step past + buf.WriteString(sp[:i+2]) + sp = sp[i+2:] + continue + } + + if as, ok := ap[0].(Sqlizer); ok { + // sqlizer argument; expand it and append the result + isql, iargs, err = as.ToSql() + buf.WriteString(sp[:i]) + buf.WriteString(isql) + args = append(args, iargs...) + } else { + // normal argument; append it and the placeholder + buf.WriteString(sp[:i+1]) + args = append(args, ap[0]) + } + + // step past the argument and placeholder + ap = ap[1:] + sp = sp[i+1:] + } + + // append the remaining sql and arguments + buf.WriteString(sp) + return buf.String(), append(args, ap...), err +} + +type concatExpr []interface{} + +func (ce concatExpr) ToSql() (sql string, args []interface{}, err error) { + for _, part := range ce { + switch p := part.(type) { + case string: + sql += p + case Sqlizer: + pSql, pArgs, err := p.ToSql() + if err != nil { + return "", nil, err + } + sql += pSql + args = append(args, pArgs...) + default: + return "", nil, fmt.Errorf("%#v is not a string or Sqlizer", part) + } + } + return +} + +// ConcatExpr builds an expression by concatenating strings and other expressions. 
+// +// Ex: +// name_expr := Expr("CONCAT(?, ' ', ?)", firstName, lastName) +// ConcatExpr("COALESCE(full_name,", name_expr, ")") +func ConcatExpr(parts ...interface{}) concatExpr { + return concatExpr(parts) +} + +// aliasExpr helps to alias part of SQL query generated with underlying "expr" +type aliasExpr struct { + expr Sqlizer + alias string +} + +// Alias allows to define alias for column in SelectBuilder. Useful when column is +// defined as complex expression like IF or CASE +// Ex: +// .Column(Alias(caseStmt, "case_column")) +func Alias(expr Sqlizer, alias string) aliasExpr { + return aliasExpr{expr, alias} +} + +func (e aliasExpr) ToSql() (sql string, args []interface{}, err error) { + sql, args, err = e.expr.ToSql() + if err == nil { + sql = fmt.Sprintf("(%s) AS %s", sql, e.alias) + } + return +} + +// Eq is syntactic sugar for use with Where/Having/Set methods. +type Eq map[string]interface{} + +func (eq Eq) toSQL(useNotOpr bool) (sql string, args []interface{}, err error) { + if len(eq) == 0 { + // Empty Sql{} evaluates to true. + sql = sqlTrue + return + } + + var ( + exprs []string + equalOpr = "=" + inOpr = "IN" + nullOpr = "IS" + inEmptyExpr = sqlFalse + ) + + if useNotOpr { + equalOpr = "<>" + inOpr = "NOT IN" + nullOpr = "IS NOT" + inEmptyExpr = sqlTrue + } + + sortedKeys := getSortedKeys(eq) + for _, key := range sortedKeys { + var expr string + val := eq[key] + + switch v := val.(type) { + case driver.Valuer: + if val, err = v.Value(); err != nil { + return + } + + // If assigned to a SELECT builder, use IN or NOT IN with the key, + // simplifying the construction of nested queries. + case SelectBuilder: + var sqlVal string + var sqlArgs []interface{} + v = v.PlaceholderFormat(Question) + sqlVal, sqlArgs, err = v.ToSql() + if err != nil { + return + } + expr = fmt.Sprintf("%s %s (%s)", key, inOpr, sqlVal) + args = append(args, sqlArgs...) 
+ exprs = append(exprs, expr) + continue + } + + r := reflect.ValueOf(val) + if r.Kind() == reflect.Ptr { + if r.IsNil() { + val = nil + } else { + val = r.Elem().Interface() + } + } + + if val == nil { + expr = fmt.Sprintf("%s %s NULL", key, nullOpr) + } else { + if isListType(val) { + valVal := reflect.ValueOf(val) + if valVal.Len() == 0 { + expr = inEmptyExpr + if args == nil { + args = []interface{}{} + } + } else { + for i := 0; i < valVal.Len(); i++ { + args = append(args, valVal.Index(i).Interface()) + } + expr = fmt.Sprintf("%s %s (%s)", key, inOpr, Placeholders(valVal.Len())) + } + } else { + expr = fmt.Sprintf("%s %s ?", key, equalOpr) + args = append(args, val) + } + } + exprs = append(exprs, expr) + } + sql = strings.Join(exprs, " AND ") + return +} + +func (eq Eq) ToSql() (sql string, args []interface{}, err error) { + return eq.toSQL(false) +} + +// NotEq is syntactic sugar for use with Where/Having/Set methods. +// Ex: +// .Where(NotEq{"id": 1}) == "id <> 1" +type NotEq Eq + +func (neq NotEq) ToSql() (sql string, args []interface{}, err error) { + return Eq(neq).toSQL(true) +} + +// Like is syntactic sugar for use with LIKE conditions. 
+// Ex: +// .Where(Like{"name": "%irrel"}) +type Like map[string]interface{} + +func (lk Like) toSql(opr string) (sql string, args []interface{}, err error) { + var exprs []string + for key, val := range lk { + expr := "" + + switch v := val.(type) { + case driver.Valuer: + if val, err = v.Value(); err != nil { + return + } + } + + if val == nil { + err = fmt.Errorf("cannot use null with like operators") + return + } else { + if isListType(val) { + err = fmt.Errorf("cannot use array or slice with like operators") + return + } else { + expr = fmt.Sprintf("%s %s ?", key, opr) + args = append(args, val) + } + } + exprs = append(exprs, expr) + } + sql = strings.Join(exprs, " AND ") + return +} + +func (lk Like) ToSql() (sql string, args []interface{}, err error) { + return lk.toSql("LIKE") +} + +// NotLike is syntactic sugar for use with LIKE conditions. +// Ex: +// .Where(NotLike{"name": "%irrel"}) +type NotLike Like + +func (nlk NotLike) ToSql() (sql string, args []interface{}, err error) { + return Like(nlk).toSql("NOT LIKE") +} + +// ILike is syntactic sugar for use with ILIKE conditions. +// Ex: +// .Where(ILike{"name": "sq%"}) +type ILike Like + +func (ilk ILike) ToSql() (sql string, args []interface{}, err error) { + return Like(ilk).toSql("ILIKE") +} + +// NotILike is syntactic sugar for use with ILIKE conditions. +// Ex: +// .Where(NotILike{"name": "sq%"}) +type NotILike Like + +func (nilk NotILike) ToSql() (sql string, args []interface{}, err error) { + return Like(nilk).toSql("NOT ILIKE") +} + +// Lt is syntactic sugar for use with Where/Having/Set methods. 
+// Ex: +// .Where(Lt{"id": 1}) +type Lt map[string]interface{} + +func (lt Lt) toSql(opposite, orEq bool) (sql string, args []interface{}, err error) { + var ( + exprs []string + opr = "<" + ) + + if opposite { + opr = ">" + } + + if orEq { + opr = fmt.Sprintf("%s%s", opr, "=") + } + + sortedKeys := getSortedKeys(lt) + for _, key := range sortedKeys { + var expr string + val := lt[key] + + switch v := val.(type) { + case driver.Valuer: + if val, err = v.Value(); err != nil { + return + } + } + + if val == nil { + err = fmt.Errorf("cannot use null with less than or greater than operators") + return + } + if isListType(val) { + err = fmt.Errorf("cannot use array or slice with less than or greater than operators") + return + } + expr = fmt.Sprintf("%s %s ?", key, opr) + args = append(args, val) + + exprs = append(exprs, expr) + } + sql = strings.Join(exprs, " AND ") + return +} + +func (lt Lt) ToSql() (sql string, args []interface{}, err error) { + return lt.toSql(false, false) +} + +// LtOrEq is syntactic sugar for use with Where/Having/Set methods. +// Ex: +// .Where(LtOrEq{"id": 1}) == "id <= 1" +type LtOrEq Lt + +func (ltOrEq LtOrEq) ToSql() (sql string, args []interface{}, err error) { + return Lt(ltOrEq).toSql(false, true) +} + +// Gt is syntactic sugar for use with Where/Having/Set methods. +// Ex: +// .Where(Gt{"id": 1}) == "id > 1" +type Gt Lt + +func (gt Gt) ToSql() (sql string, args []interface{}, err error) { + return Lt(gt).toSql(true, false) +} + +// GtOrEq is syntactic sugar for use with Where/Having/Set methods. 
+// Ex: +// .Where(GtOrEq{"id": 1}) == "id >= 1" +type GtOrEq Lt + +func (gtOrEq GtOrEq) ToSql() (sql string, args []interface{}, err error) { + return Lt(gtOrEq).toSql(true, true) +} + +type conj []Sqlizer + +func (c conj) join(sep, defaultExpr string) (sql string, args []interface{}, err error) { + if len(c) == 0 { + return defaultExpr, []interface{}{}, nil + } + var sqlParts []string + for _, sqlizer := range c { + partSQL, partArgs, err := nestedToSql(sqlizer) + if err != nil { + return "", nil, err + } + if partSQL != "" { + sqlParts = append(sqlParts, partSQL) + args = append(args, partArgs...) + } + } + if len(sqlParts) > 0 { + sql = fmt.Sprintf("(%s)", strings.Join(sqlParts, sep)) + } + return +} + +// And conjunction Sqlizers +type And conj + +func (a And) ToSql() (string, []interface{}, error) { + return conj(a).join(" AND ", sqlTrue) +} + +// Or conjunction Sqlizers +type Or conj + +func (o Or) ToSql() (string, []interface{}, error) { + return conj(o).join(" OR ", sqlFalse) +} + +func getSortedKeys(exp map[string]interface{}) []string { + sortedKeys := make([]string, 0, len(exp)) + for k := range exp { + sortedKeys = append(sortedKeys, k) + } + sort.Strings(sortedKeys) + return sortedKeys +} + +func isListType(val interface{}) bool { + if driver.IsValue(val) { + return false + } + valVal := reflect.ValueOf(val) + return valVal.Kind() == reflect.Array || valVal.Kind() == reflect.Slice +} diff --git a/vendor/github.com/mattermost/squirrel/insert.go b/vendor/github.com/mattermost/squirrel/insert.go new file mode 100644 index 00000000..c23a5793 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/insert.go @@ -0,0 +1,298 @@ +package squirrel + +import ( + "bytes" + "database/sql" + "errors" + "fmt" + "io" + "sort" + "strings" + + "github.com/lann/builder" +) + +type insertData struct { + PlaceholderFormat PlaceholderFormat + RunWith BaseRunner + Prefixes []Sqlizer + StatementKeyword string + Options []string + Into string + Columns []string + Values 
[][]interface{} + Suffixes []Sqlizer + Select *SelectBuilder +} + +func (d *insertData) Exec() (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return ExecWith(d.RunWith, d) +} + +func (d *insertData) Query() (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return QueryWith(d.RunWith, d) +} + +func (d *insertData) QueryRow() RowScanner { + if d.RunWith == nil { + return &Row{err: RunnerNotSet} + } + queryRower, ok := d.RunWith.(QueryRower) + if !ok { + return &Row{err: RunnerNotQueryRunner} + } + return QueryRowWith(queryRower, d) +} + +func (d *insertData) ToSql() (sqlStr string, args []interface{}, err error) { + if len(d.Into) == 0 { + err = errors.New("insert statements must specify a table") + return + } + if len(d.Values) == 0 && d.Select == nil { + err = errors.New("insert statements must have at least one set of values or select clause") + return + } + + sql := &bytes.Buffer{} + + if len(d.Prefixes) > 0 { + args, err = appendToSql(d.Prefixes, sql, " ", args) + if err != nil { + return + } + + sql.WriteString(" ") + } + + if d.StatementKeyword == "" { + sql.WriteString("INSERT ") + } else { + sql.WriteString(d.StatementKeyword) + sql.WriteString(" ") + } + + if len(d.Options) > 0 { + sql.WriteString(strings.Join(d.Options, " ")) + sql.WriteString(" ") + } + + sql.WriteString("INTO ") + sql.WriteString(d.Into) + sql.WriteString(" ") + + if len(d.Columns) > 0 { + sql.WriteString("(") + sql.WriteString(strings.Join(d.Columns, ",")) + sql.WriteString(") ") + } + + if d.Select != nil { + args, err = d.appendSelectToSQL(sql, args) + } else { + args, err = d.appendValuesToSQL(sql, args) + } + if err != nil { + return + } + + if len(d.Suffixes) > 0 { + sql.WriteString(" ") + args, err = appendToSql(d.Suffixes, sql, " ", args) + if err != nil { + return + } + } + + sqlStr, err = d.PlaceholderFormat.ReplacePlaceholders(sql.String()) + return +} + +func (d *insertData) appendValuesToSQL(w io.Writer, args 
[]interface{}) ([]interface{}, error) { + if len(d.Values) == 0 { + return args, errors.New("values for insert statements are not set") + } + + io.WriteString(w, "VALUES ") + + valuesStrings := make([]string, len(d.Values)) + for r, row := range d.Values { + valueStrings := make([]string, len(row)) + for v, val := range row { + if vs, ok := val.(Sqlizer); ok { + vsql, vargs, err := vs.ToSql() + if err != nil { + return nil, err + } + valueStrings[v] = vsql + args = append(args, vargs...) + } else { + valueStrings[v] = "?" + args = append(args, val) + } + } + valuesStrings[r] = fmt.Sprintf("(%s)", strings.Join(valueStrings, ",")) + } + + io.WriteString(w, strings.Join(valuesStrings, ",")) + + return args, nil +} + +func (d *insertData) appendSelectToSQL(w io.Writer, args []interface{}) ([]interface{}, error) { + if d.Select == nil { + return args, errors.New("select clause for insert statements are not set") + } + + selectClause, sArgs, err := d.Select.ToSql() + if err != nil { + return args, err + } + + io.WriteString(w, selectClause) + args = append(args, sArgs...) + + return args, nil +} + +// Builder + +// InsertBuilder builds SQL INSERT statements. +type InsertBuilder builder.Builder + +func init() { + builder.Register(InsertBuilder{}, insertData{}) +} + +// Format methods + +// PlaceholderFormat sets PlaceholderFormat (e.g. Question or Dollar) for the +// query. +func (b InsertBuilder) PlaceholderFormat(f PlaceholderFormat) InsertBuilder { + return builder.Set(b, "PlaceholderFormat", f).(InsertBuilder) +} + +// Runner methods + +// RunWith sets a Runner (like database/sql.DB) to be used with e.g. Exec. +func (b InsertBuilder) RunWith(runner BaseRunner) InsertBuilder { + return setRunWith(b, runner).(InsertBuilder) +} + +// Exec builds and Execs the query with the Runner set by RunWith. 
+func (b InsertBuilder) Exec() (sql.Result, error) { + data := builder.GetStruct(b).(insertData) + return data.Exec() +} + +// Query builds and Querys the query with the Runner set by RunWith. +func (b InsertBuilder) Query() (*sql.Rows, error) { + data := builder.GetStruct(b).(insertData) + return data.Query() +} + +// QueryRow builds and QueryRows the query with the Runner set by RunWith. +func (b InsertBuilder) QueryRow() RowScanner { + data := builder.GetStruct(b).(insertData) + return data.QueryRow() +} + +// Scan is a shortcut for QueryRow().Scan. +func (b InsertBuilder) Scan(dest ...interface{}) error { + return b.QueryRow().Scan(dest...) +} + +// SQL methods + +// ToSql builds the query into a SQL string and bound args. +func (b InsertBuilder) ToSql() (string, []interface{}, error) { + data := builder.GetStruct(b).(insertData) + return data.ToSql() +} + +// MustSql builds the query into a SQL string and bound args. +// It panics if there are any errors. +func (b InsertBuilder) MustSql() (string, []interface{}) { + sql, args, err := b.ToSql() + if err != nil { + panic(err) + } + return sql, args +} + +// Prefix adds an expression to the beginning of the query +func (b InsertBuilder) Prefix(sql string, args ...interface{}) InsertBuilder { + return b.PrefixExpr(Expr(sql, args...)) +} + +// PrefixExpr adds an expression to the very beginning of the query +func (b InsertBuilder) PrefixExpr(expr Sqlizer) InsertBuilder { + return builder.Append(b, "Prefixes", expr).(InsertBuilder) +} + +// Options adds keyword options before the INTO clause of the query. +func (b InsertBuilder) Options(options ...string) InsertBuilder { + return builder.Extend(b, "Options", options).(InsertBuilder) +} + +// Into sets the INTO clause of the query. +func (b InsertBuilder) Into(from string) InsertBuilder { + return builder.Set(b, "Into", from).(InsertBuilder) +} + +// Columns adds insert columns to the query. 
+func (b InsertBuilder) Columns(columns ...string) InsertBuilder { + return builder.Extend(b, "Columns", columns).(InsertBuilder) +} + +// Values adds a single row's values to the query. +func (b InsertBuilder) Values(values ...interface{}) InsertBuilder { + return builder.Append(b, "Values", values).(InsertBuilder) +} + +// Suffix adds an expression to the end of the query +func (b InsertBuilder) Suffix(sql string, args ...interface{}) InsertBuilder { + return b.SuffixExpr(Expr(sql, args...)) +} + +// SuffixExpr adds an expression to the end of the query +func (b InsertBuilder) SuffixExpr(expr Sqlizer) InsertBuilder { + return builder.Append(b, "Suffixes", expr).(InsertBuilder) +} + +// SetMap set columns and values for insert builder from a map of column name and value +// note that it will reset all previous columns and values was set if any +func (b InsertBuilder) SetMap(clauses map[string]interface{}) InsertBuilder { + // Keep the columns in a consistent order by sorting the column key string. 
+ cols := make([]string, 0, len(clauses)) + for col := range clauses { + cols = append(cols, col) + } + sort.Strings(cols) + + vals := make([]interface{}, 0, len(clauses)) + for _, col := range cols { + vals = append(vals, clauses[col]) + } + + b = builder.Set(b, "Columns", cols).(InsertBuilder) + b = builder.Set(b, "Values", [][]interface{}{vals}).(InsertBuilder) + + return b +} + +// Select set Select clause for insert query +// If Values and Select are used, then Select has higher priority +func (b InsertBuilder) Select(sb SelectBuilder) InsertBuilder { + return builder.Set(b, "Select", &sb).(InsertBuilder) +} + +func (b InsertBuilder) statementKeyword(keyword string) InsertBuilder { + return builder.Set(b, "StatementKeyword", keyword).(InsertBuilder) +} diff --git a/vendor/github.com/mattermost/squirrel/insert_ctx.go b/vendor/github.com/mattermost/squirrel/insert_ctx.go new file mode 100644 index 00000000..4541c2fe --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/insert_ctx.go @@ -0,0 +1,69 @@ +// +build go1.8 + +package squirrel + +import ( + "context" + "database/sql" + + "github.com/lann/builder" +) + +func (d *insertData) ExecContext(ctx context.Context) (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(ExecerContext) + if !ok { + return nil, NoContextSupport + } + return ExecContextWith(ctx, ctxRunner, d) +} + +func (d *insertData) QueryContext(ctx context.Context) (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(QueryerContext) + if !ok { + return nil, NoContextSupport + } + return QueryContextWith(ctx, ctxRunner, d) +} + +func (d *insertData) QueryRowContext(ctx context.Context) RowScanner { + if d.RunWith == nil { + return &Row{err: RunnerNotSet} + } + queryRower, ok := d.RunWith.(QueryRowerContext) + if !ok { + if _, ok := d.RunWith.(QueryerContext); !ok { + return &Row{err: RunnerNotQueryRunner} + } + return &Row{err: 
NoContextSupport} + } + return QueryRowContextWith(ctx, queryRower, d) +} + +// ExecContext builds and ExecContexts the query with the Runner set by RunWith. +func (b InsertBuilder) ExecContext(ctx context.Context) (sql.Result, error) { + data := builder.GetStruct(b).(insertData) + return data.ExecContext(ctx) +} + +// QueryContext builds and QueryContexts the query with the Runner set by RunWith. +func (b InsertBuilder) QueryContext(ctx context.Context) (*sql.Rows, error) { + data := builder.GetStruct(b).(insertData) + return data.QueryContext(ctx) +} + +// QueryRowContext builds and QueryRowContexts the query with the Runner set by RunWith. +func (b InsertBuilder) QueryRowContext(ctx context.Context) RowScanner { + data := builder.GetStruct(b).(insertData) + return data.QueryRowContext(ctx) +} + +// ScanContext is a shortcut for QueryRowContext().Scan. +func (b InsertBuilder) ScanContext(ctx context.Context, dest ...interface{}) error { + return b.QueryRowContext(ctx).Scan(dest...) +} diff --git a/vendor/github.com/mattermost/squirrel/part.go b/vendor/github.com/mattermost/squirrel/part.go new file mode 100644 index 00000000..f3a7b154 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/part.go @@ -0,0 +1,63 @@ +package squirrel + +import ( + "fmt" + "io" +) + +type part struct { + pred interface{} + args []interface{} +} + +func newPart(pred interface{}, args ...interface{}) Sqlizer { + return &part{pred, args} +} + +func (p part) ToSql() (sql string, args []interface{}, err error) { + switch pred := p.pred.(type) { + case nil: + // no-op + case Sqlizer: + sql, args, err = pred.ToSql() + case string: + sql = pred + args = p.args + default: + err = fmt.Errorf("expected string or Sqlizer, not %T", pred) + } + return +} + +func nestedToSql(s Sqlizer) (string, []interface{}, error) { + if raw, ok := s.(rawSqlizer); ok { + return raw.toSqlRaw() + } else { + return s.ToSql() + } +} + +func appendToSql(parts []Sqlizer, w io.Writer, sep string, args []interface{}) 
([]interface{}, error) { + for i, p := range parts { + partSql, partArgs, err := nestedToSql(p) + if err != nil { + return nil, err + } else if len(partSql) == 0 { + continue + } + + if i > 0 { + _, err := io.WriteString(w, sep) + if err != nil { + return nil, err + } + } + + _, err = io.WriteString(w, partSql) + if err != nil { + return nil, err + } + args = append(args, partArgs...) + } + return args, nil +} diff --git a/vendor/github.com/mattermost/squirrel/placeholder.go b/vendor/github.com/mattermost/squirrel/placeholder.go new file mode 100644 index 00000000..8e97a6c6 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/placeholder.go @@ -0,0 +1,114 @@ +package squirrel + +import ( + "bytes" + "fmt" + "strings" +) + +// PlaceholderFormat is the interface that wraps the ReplacePlaceholders method. +// +// ReplacePlaceholders takes a SQL statement and replaces each question mark +// placeholder with a (possibly different) SQL placeholder. +type PlaceholderFormat interface { + ReplacePlaceholders(sql string) (string, error) +} + +type placeholderDebugger interface { + debugPlaceholder() string +} + +var ( + // Question is a PlaceholderFormat instance that leaves placeholders as + // question marks. + Question = questionFormat{} + + // Dollar is a PlaceholderFormat instance that replaces placeholders with + // dollar-prefixed positional placeholders (e.g. $1, $2, $3). + Dollar = dollarFormat{} + + // Colon is a PlaceholderFormat instance that replaces placeholders with + // colon-prefixed positional placeholders (e.g. :1, :2, :3). + Colon = colonFormat{} + + // AtP is a PlaceholderFormat instance that replaces placeholders with + // "@p"-prefixed positional placeholders (e.g. @p1, @p2, @p3). + AtP = atpFormat{} +) + +type questionFormat struct{} + +func (questionFormat) ReplacePlaceholders(sql string) (string, error) { + return sql, nil +} + +func (questionFormat) debugPlaceholder() string { + return "?" 
+} + +type dollarFormat struct{} + +func (dollarFormat) ReplacePlaceholders(sql string) (string, error) { + return replacePositionalPlaceholders(sql, "$") +} + +func (dollarFormat) debugPlaceholder() string { + return "$" +} + +type colonFormat struct{} + +func (colonFormat) ReplacePlaceholders(sql string) (string, error) { + return replacePositionalPlaceholders(sql, ":") +} + +func (colonFormat) debugPlaceholder() string { + return ":" +} + +type atpFormat struct{} + +func (atpFormat) ReplacePlaceholders(sql string) (string, error) { + return replacePositionalPlaceholders(sql, "@p") +} + +func (atpFormat) debugPlaceholder() string { + return "@p" +} + +// Placeholders returns a string with count ? placeholders joined with commas. +func Placeholders(count int) string { + if count < 1 { + return "" + } + + return strings.Repeat(",?", count)[1:] +} + +func replacePositionalPlaceholders(sql, prefix string) (string, error) { + buf := &bytes.Buffer{} + i := 0 + for { + p := strings.Index(sql, "?") + if p == -1 { + break + } + + if len(sql[p:]) > 1 && sql[p:p+2] == "??" { // escape ?? => ? + buf.WriteString(sql[:p]) + buf.WriteString("?") + if len(sql[p:]) == 1 { + break + } + sql = sql[p+2:] + } else { + i++ + buf.WriteString(sql[:p]) + fmt.Fprintf(buf, "%s%d", prefix, i) + sql = sql[p+1:] + } + } + + buf.WriteString(sql) + return buf.String(), nil +} diff --git a/vendor/github.com/mattermost/squirrel/row.go b/vendor/github.com/mattermost/squirrel/row.go new file mode 100644 index 00000000..74ffda92 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/row.go @@ -0,0 +1,22 @@ +package squirrel + +// RowScanner is the interface that wraps the Scan method. +// +// Scan behaves like database/sql.Row.Scan. +type RowScanner interface { + Scan(...interface{}) error +} + +// Row wraps database/sql.Row to let squirrel return new errors on Scan. +type Row struct { + RowScanner + err error +} + +// Scan returns Row.err or calls RowScanner.Scan. 
+func (r *Row) Scan(dest ...interface{}) error { + if r.err != nil { + return r.err + } + return r.RowScanner.Scan(dest...) +} diff --git a/vendor/github.com/mattermost/squirrel/select.go b/vendor/github.com/mattermost/squirrel/select.go new file mode 100644 index 00000000..b585344c --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/select.go @@ -0,0 +1,396 @@ +package squirrel + +import ( + "bytes" + "database/sql" + "fmt" + "strings" + + "github.com/lann/builder" +) + +type selectData struct { + PlaceholderFormat PlaceholderFormat + RunWith BaseRunner + Prefixes []Sqlizer + Options []string + Columns []Sqlizer + From Sqlizer + Joins []Sqlizer + WhereParts []Sqlizer + GroupBys []string + HavingParts []Sqlizer + OrderByParts []Sqlizer + Limit string + Offset string + Suffixes []Sqlizer +} + +func (d *selectData) Exec() (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return ExecWith(d.RunWith, d) +} + +func (d *selectData) Query() (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return QueryWith(d.RunWith, d) +} + +func (d *selectData) QueryRow() RowScanner { + if d.RunWith == nil { + return &Row{err: RunnerNotSet} + } + queryRower, ok := d.RunWith.(QueryRower) + if !ok { + return &Row{err: RunnerNotQueryRunner} + } + return QueryRowWith(queryRower, d) +} + +func (d *selectData) ToSql() (sqlStr string, args []interface{}, err error) { + sqlStr, args, err = d.toSqlRaw() + if err != nil { + return + } + + sqlStr, err = d.PlaceholderFormat.ReplacePlaceholders(sqlStr) + return +} + +func (d *selectData) toSqlRaw() (sqlStr string, args []interface{}, err error) { + if len(d.Columns) == 0 { + err = fmt.Errorf("select statements must have at least one result column") + return + } + + sql := &bytes.Buffer{} + + if len(d.Prefixes) > 0 { + args, err = appendToSql(d.Prefixes, sql, " ", args) + if err != nil { + return + } + + sql.WriteString(" ") + } + + sql.WriteString("SELECT ") + + if len(d.Options) > 0 { + 
sql.WriteString(strings.Join(d.Options, " ")) + sql.WriteString(" ") + } + + if len(d.Columns) > 0 { + args, err = appendToSql(d.Columns, sql, ", ", args) + if err != nil { + return + } + } + + if d.From != nil { + sql.WriteString(" FROM ") + args, err = appendToSql([]Sqlizer{d.From}, sql, "", args) + if err != nil { + return + } + } + + if len(d.Joins) > 0 { + sql.WriteString(" ") + args, err = appendToSql(d.Joins, sql, " ", args) + if err != nil { + return + } + } + + if len(d.WhereParts) > 0 { + sql.WriteString(" WHERE ") + args, err = appendToSql(d.WhereParts, sql, " AND ", args) + if err != nil { + return + } + } + + if len(d.GroupBys) > 0 { + sql.WriteString(" GROUP BY ") + sql.WriteString(strings.Join(d.GroupBys, ", ")) + } + + if len(d.HavingParts) > 0 { + sql.WriteString(" HAVING ") + args, err = appendToSql(d.HavingParts, sql, " AND ", args) + if err != nil { + return + } + } + + if len(d.OrderByParts) > 0 { + sql.WriteString(" ORDER BY ") + args, err = appendToSql(d.OrderByParts, sql, ", ", args) + if err != nil { + return + } + } + + if len(d.Limit) > 0 { + sql.WriteString(" LIMIT ") + sql.WriteString(d.Limit) + } + + if len(d.Offset) > 0 { + sql.WriteString(" OFFSET ") + sql.WriteString(d.Offset) + } + + if len(d.Suffixes) > 0 { + sql.WriteString(" ") + + args, err = appendToSql(d.Suffixes, sql, " ", args) + if err != nil { + return + } + } + + sqlStr = sql.String() + return +} + +// Builder + +// SelectBuilder builds SQL SELECT statements. +type SelectBuilder builder.Builder + +func init() { + builder.Register(SelectBuilder{}, selectData{}) +} + +// Format methods + +// PlaceholderFormat sets PlaceholderFormat (e.g. Question or Dollar) for the +// query. +func (b SelectBuilder) PlaceholderFormat(f PlaceholderFormat) SelectBuilder { + return builder.Set(b, "PlaceholderFormat", f).(SelectBuilder) +} + +// Runner methods + +// RunWith sets a Runner (like database/sql.DB) to be used with e.g. Exec. +// For most cases runner will be a database connection. 
+// +// Internally we use this to mock out the database connection for testing. +func (b SelectBuilder) RunWith(runner BaseRunner) SelectBuilder { + return setRunWith(b, runner).(SelectBuilder) +} + +// Exec builds and Execs the query with the Runner set by RunWith. +func (b SelectBuilder) Exec() (sql.Result, error) { + data := builder.GetStruct(b).(selectData) + return data.Exec() +} + +// Query builds and Querys the query with the Runner set by RunWith. +func (b SelectBuilder) Query() (*sql.Rows, error) { + data := builder.GetStruct(b).(selectData) + return data.Query() +} + +// QueryRow builds and QueryRows the query with the Runner set by RunWith. +func (b SelectBuilder) QueryRow() RowScanner { + data := builder.GetStruct(b).(selectData) + return data.QueryRow() +} + +// Scan is a shortcut for QueryRow().Scan. +func (b SelectBuilder) Scan(dest ...interface{}) error { + return b.QueryRow().Scan(dest...) +} + +// SQL methods + +// ToSql builds the query into a SQL string and bound args. +func (b SelectBuilder) ToSql() (string, []interface{}, error) { + data := builder.GetStruct(b).(selectData) + return data.ToSql() +} + +func (b SelectBuilder) toSqlRaw() (string, []interface{}, error) { + data := builder.GetStruct(b).(selectData) + return data.toSqlRaw() +} + +// MustSql builds the query into a SQL string and bound args. +// It panics if there are any errors. +func (b SelectBuilder) MustSql() (string, []interface{}) { + sql, args, err := b.ToSql() + if err != nil { + panic(err) + } + return sql, args +} + +// Prefix adds an expression to the beginning of the query +func (b SelectBuilder) Prefix(sql string, args ...interface{}) SelectBuilder { + return b.PrefixExpr(Expr(sql, args...)) +} + +// PrefixExpr adds an expression to the very beginning of the query +func (b SelectBuilder) PrefixExpr(expr Sqlizer) SelectBuilder { + return builder.Append(b, "Prefixes", expr).(SelectBuilder) +} + +// Distinct adds a DISTINCT clause to the query. 
+func (b SelectBuilder) Distinct() SelectBuilder { + return b.Options("DISTINCT") +} + +// Options adds select option to the query +func (b SelectBuilder) Options(options ...string) SelectBuilder { + return builder.Extend(b, "Options", options).(SelectBuilder) +} + +// Columns adds result columns to the query. +func (b SelectBuilder) Columns(columns ...string) SelectBuilder { + parts := make([]interface{}, 0, len(columns)) + for _, str := range columns { + parts = append(parts, newPart(str)) + } + return builder.Extend(b, "Columns", parts).(SelectBuilder) +} + +// Column adds a result column to the query. +// Unlike Columns, Column accepts args which will be bound to placeholders in +// the columns string, for example: +// Column("IF(col IN ("+squirrel.Placeholders(3)+"), 1, 0) as col", 1, 2, 3) +func (b SelectBuilder) Column(column interface{}, args ...interface{}) SelectBuilder { + return builder.Append(b, "Columns", newPart(column, args...)).(SelectBuilder) +} + +// From sets the FROM clause of the query. +func (b SelectBuilder) From(from string) SelectBuilder { + return builder.Set(b, "From", newPart(from)).(SelectBuilder) +} + +// FromSelect sets a subquery into the FROM clause of the query. +func (b SelectBuilder) FromSelect(from SelectBuilder, alias string) SelectBuilder { + // Prevent misnumbered parameters in nested selects (#183). + from = from.PlaceholderFormat(Question) + return builder.Set(b, "From", Alias(from, alias)).(SelectBuilder) +} + +// JoinClause adds a join clause to the query. +func (b SelectBuilder) JoinClause(pred interface{}, args ...interface{}) SelectBuilder { + return builder.Append(b, "Joins", newPart(pred, args...)).(SelectBuilder) +} + +// Join adds a JOIN clause to the query. +func (b SelectBuilder) Join(join string, rest ...interface{}) SelectBuilder { + return b.JoinClause("JOIN "+join, rest...) +} + +// LeftJoin adds a LEFT JOIN clause to the query. 
+func (b SelectBuilder) LeftJoin(join string, rest ...interface{}) SelectBuilder { + return b.JoinClause("LEFT JOIN "+join, rest...) +} + +// RightJoin adds a RIGHT JOIN clause to the query. +func (b SelectBuilder) RightJoin(join string, rest ...interface{}) SelectBuilder { + return b.JoinClause("RIGHT JOIN "+join, rest...) +} + +// InnerJoin adds a INNER JOIN clause to the query. +func (b SelectBuilder) InnerJoin(join string, rest ...interface{}) SelectBuilder { + return b.JoinClause("INNER JOIN "+join, rest...) +} + +// CrossJoin adds a CROSS JOIN clause to the query. +func (b SelectBuilder) CrossJoin(join string, rest ...interface{}) SelectBuilder { + return b.JoinClause("CROSS JOIN "+join, rest...) +} + +// Where adds an expression to the WHERE clause of the query. +// +// Expressions are ANDed together in the generated SQL. +// +// Where accepts several types for its pred argument: +// +// nil OR "" - ignored. +// +// string - SQL expression. +// If the expression has SQL placeholders then a set of arguments must be passed +// as well, one for each placeholder. +// +// map[string]interface{} OR Eq - map of SQL expressions to values. Each key is +// transformed into an expression like " = ?", with the corresponding value +// bound to the placeholder. If the value is nil, the expression will be " +// IS NULL". If the value is an array or slice, the expression will be " IN +// (?,?,...)", with one placeholder for each item in the value. These expressions +// are ANDed together. +// +// Where will panic if pred isn't any of the above types. +func (b SelectBuilder) Where(pred interface{}, args ...interface{}) SelectBuilder { + if pred == nil || pred == "" { + return b + } + return builder.Append(b, "WhereParts", newWherePart(pred, args...)).(SelectBuilder) +} + +// GroupBy adds GROUP BY expressions to the query. 
+func (b SelectBuilder) GroupBy(groupBys ...string) SelectBuilder { + return builder.Extend(b, "GroupBys", groupBys).(SelectBuilder) +} + +// Having adds an expression to the HAVING clause of the query. +// +// See Where. +func (b SelectBuilder) Having(pred interface{}, rest ...interface{}) SelectBuilder { + return builder.Append(b, "HavingParts", newWherePart(pred, rest...)).(SelectBuilder) +} + +// OrderByClause adds ORDER BY clause to the query. +func (b SelectBuilder) OrderByClause(pred interface{}, args ...interface{}) SelectBuilder { + return builder.Append(b, "OrderByParts", newPart(pred, args...)).(SelectBuilder) +} + +// OrderBy adds ORDER BY expressions to the query. +func (b SelectBuilder) OrderBy(orderBys ...string) SelectBuilder { + for _, orderBy := range orderBys { + b = b.OrderByClause(orderBy) + } + + return b +} + +// Limit sets a LIMIT clause on the query. +func (b SelectBuilder) Limit(limit uint64) SelectBuilder { + return builder.Set(b, "Limit", fmt.Sprintf("%d", limit)).(SelectBuilder) +} + +// Limit ALL allows to access all records with limit +func (b SelectBuilder) RemoveLimit() SelectBuilder { + return builder.Delete(b, "Limit").(SelectBuilder) +} + +// Offset sets a OFFSET clause on the query. +func (b SelectBuilder) Offset(offset uint64) SelectBuilder { + return builder.Set(b, "Offset", fmt.Sprintf("%d", offset)).(SelectBuilder) +} + +// RemoveOffset removes OFFSET clause. 
+func (b SelectBuilder) RemoveOffset() SelectBuilder { + return builder.Delete(b, "Offset").(SelectBuilder) +} + +// Suffix adds an expression to the end of the query +func (b SelectBuilder) Suffix(sql string, args ...interface{}) SelectBuilder { + return b.SuffixExpr(Expr(sql, args...)) +} + +// SuffixExpr adds an expression to the end of the query +func (b SelectBuilder) SuffixExpr(expr Sqlizer) SelectBuilder { + return builder.Append(b, "Suffixes", expr).(SelectBuilder) +} diff --git a/vendor/github.com/mattermost/squirrel/select_ctx.go b/vendor/github.com/mattermost/squirrel/select_ctx.go new file mode 100644 index 00000000..4c42c13f --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/select_ctx.go @@ -0,0 +1,69 @@ +// +build go1.8 + +package squirrel + +import ( + "context" + "database/sql" + + "github.com/lann/builder" +) + +func (d *selectData) ExecContext(ctx context.Context) (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(ExecerContext) + if !ok { + return nil, NoContextSupport + } + return ExecContextWith(ctx, ctxRunner, d) +} + +func (d *selectData) QueryContext(ctx context.Context) (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(QueryerContext) + if !ok { + return nil, NoContextSupport + } + return QueryContextWith(ctx, ctxRunner, d) +} + +func (d *selectData) QueryRowContext(ctx context.Context) RowScanner { + if d.RunWith == nil { + return &Row{err: RunnerNotSet} + } + queryRower, ok := d.RunWith.(QueryRowerContext) + if !ok { + if _, ok := d.RunWith.(QueryerContext); !ok { + return &Row{err: RunnerNotQueryRunner} + } + return &Row{err: NoContextSupport} + } + return QueryRowContextWith(ctx, queryRower, d) +} + +// ExecContext builds and ExecContexts the query with the Runner set by RunWith. 
+func (b SelectBuilder) ExecContext(ctx context.Context) (sql.Result, error) { + data := builder.GetStruct(b).(selectData) + return data.ExecContext(ctx) +} + +// QueryContext builds and QueryContexts the query with the Runner set by RunWith. +func (b SelectBuilder) QueryContext(ctx context.Context) (*sql.Rows, error) { + data := builder.GetStruct(b).(selectData) + return data.QueryContext(ctx) +} + +// QueryRowContext builds and QueryRowContexts the query with the Runner set by RunWith. +func (b SelectBuilder) QueryRowContext(ctx context.Context) RowScanner { + data := builder.GetStruct(b).(selectData) + return data.QueryRowContext(ctx) +} + +// ScanContext is a shortcut for QueryRowContext().Scan. +func (b SelectBuilder) ScanContext(ctx context.Context, dest ...interface{}) error { + return b.QueryRowContext(ctx).Scan(dest...) +} diff --git a/vendor/github.com/mattermost/squirrel/squirrel.go b/vendor/github.com/mattermost/squirrel/squirrel.go new file mode 100644 index 00000000..c0e21952 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/squirrel.go @@ -0,0 +1,183 @@ +// Package squirrel provides a fluent SQL generator. +// +// See https://github.com/mattermost/squirrel for examples. +package squirrel + +import ( + "bytes" + "database/sql" + "fmt" + "strings" + + "github.com/lann/builder" +) + +// Sqlizer is the interface that wraps the ToSql method. +// +// ToSql returns a SQL representation of the Sqlizer, along with a slice of args +// as passed to e.g. database/sql.Exec. It can also return an error. +type Sqlizer interface { + ToSql() (string, []interface{}, error) +} + +// rawSqlizer is expected to do what Sqlizer does, but without finalizing placeholders. +// This is useful for nested queries. +type rawSqlizer interface { + toSqlRaw() (string, []interface{}, error) +} + +// Execer is the interface that wraps the Exec method. +// +// Exec executes the given query as implemented by database/sql.Exec. 
+type Execer interface { + Exec(query string, args ...interface{}) (sql.Result, error) +} + +// Queryer is the interface that wraps the Query method. +// +// Query executes the given query as implemented by database/sql.Query. +type Queryer interface { + Query(query string, args ...interface{}) (*sql.Rows, error) +} + +// QueryRower is the interface that wraps the QueryRow method. +// +// QueryRow executes the given query as implemented by database/sql.QueryRow. +type QueryRower interface { + QueryRow(query string, args ...interface{}) RowScanner +} + +// BaseRunner groups the Execer and Queryer interfaces. +type BaseRunner interface { + Execer + Queryer +} + +// Runner groups the Execer, Queryer, and QueryRower interfaces. +type Runner interface { + Execer + Queryer + QueryRower +} + +// WrapStdSql wraps a type implementing the standard SQL interface with methods that +// squirrel expects. +func WrapStdSql(stdSql StdSql) Runner { + return &stdsqlRunner{stdSql} +} + +// StdSql encompasses the standard methods of the *sql.DB type, and other types that +// wrap these methods. +type StdSql interface { + Query(string, ...interface{}) (*sql.Rows, error) + QueryRow(string, ...interface{}) *sql.Row + Exec(string, ...interface{}) (sql.Result, error) +} + +type stdsqlRunner struct { + StdSql +} + +func (r *stdsqlRunner) QueryRow(query string, args ...interface{}) RowScanner { + return r.StdSql.QueryRow(query, args...) +} + +func setRunWith(b interface{}, runner BaseRunner) interface{} { + switch r := runner.(type) { + case StdSqlCtx: + runner = WrapStdSqlCtx(r) + case StdSql: + runner = WrapStdSql(r) + } + return builder.Set(b, "RunWith", runner) +} + +// RunnerNotSet is returned by methods that need a Runner if it isn't set. +var RunnerNotSet = fmt.Errorf("cannot run; no Runner set (RunWith)") + +// RunnerNotQueryRunner is returned by QueryRow if the RunWith value doesn't implement QueryRower. 
+var RunnerNotQueryRunner = fmt.Errorf("cannot QueryRow; Runner is not a QueryRower") + +// ExecWith Execs the SQL returned by s with db. +func ExecWith(db Execer, s Sqlizer) (res sql.Result, err error) { + query, args, err := s.ToSql() + if err != nil { + return + } + return db.Exec(query, args...) +} + +// QueryWith Querys the SQL returned by s with db. +func QueryWith(db Queryer, s Sqlizer) (rows *sql.Rows, err error) { + query, args, err := s.ToSql() + if err != nil { + return + } + return db.Query(query, args...) +} + +// QueryRowWith QueryRows the SQL returned by s with db. +func QueryRowWith(db QueryRower, s Sqlizer) RowScanner { + query, args, err := s.ToSql() + return &Row{RowScanner: db.QueryRow(query, args...), err: err} +} + +// DebugSqlizer calls ToSql on s and shows the approximate SQL to be executed +// +// If ToSql returns an error, the result of this method will look like: +// "[ToSql error: %s]" or "[DebugSqlizer error: %s]" +// +// IMPORTANT: As its name suggests, this function should only be used for +// debugging. While the string result *might* be valid SQL, this function does +// not try very hard to ensure it. Additionally, executing the output of this +// function with any untrusted user input is certainly insecure. +func DebugSqlizer(s Sqlizer) string { + sql, args, err := s.ToSql() + if err != nil { + return fmt.Sprintf("[ToSql error: %s]", err) + } + + var placeholder string + downCast, ok := s.(placeholderDebugger) + if !ok { + placeholder = "?" + } else { + placeholder = downCast.debugPlaceholder() + } + // TODO: dedupe this with placeholder.go + buf := &bytes.Buffer{} + i := 0 + for { + p := strings.Index(sql, placeholder) + if p == -1 { + break + } + if len(sql[p:]) > 1 && sql[p:p+2] == "??" { // escape ?? => ? 
+ buf.WriteString(sql[:p]) + buf.WriteString("?") + if len(sql[p:]) == 1 { + break + } + sql = sql[p+2:] + } else { + if i+1 > len(args) { + return fmt.Sprintf( + "[DebugSqlizer error: too many placeholders in %#v for %d args]", + sql, len(args)) + } + buf.WriteString(sql[:p]) + fmt.Fprintf(buf, "'%v'", args[i]) + // advance our sql string "cursor" beyond the arg we placed + sql = sql[p+1:] + i++ + } + } + if i < len(args) { + return fmt.Sprintf( + "[DebugSqlizer error: not enough placeholders in %#v for %d args]", + sql, len(args)) + } + // "append" any remaning sql that won't need interpolating + buf.WriteString(sql) + return buf.String() +} diff --git a/vendor/github.com/mattermost/squirrel/squirrel_ctx.go b/vendor/github.com/mattermost/squirrel/squirrel_ctx.go new file mode 100644 index 00000000..c20148ad --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/squirrel_ctx.go @@ -0,0 +1,93 @@ +// +build go1.8 + +package squirrel + +import ( + "context" + "database/sql" + "errors" +) + +// NoContextSupport is returned if a db doesn't support Context. +var NoContextSupport = errors.New("DB does not support Context") + +// ExecerContext is the interface that wraps the ExecContext method. +// +// Exec executes the given query as implemented by database/sql.ExecContext. +type ExecerContext interface { + ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) +} + +// QueryerContext is the interface that wraps the QueryContext method. +// +// QueryContext executes the given query as implemented by database/sql.QueryContext. +type QueryerContext interface { + QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) +} + +// QueryRowerContext is the interface that wraps the QueryRowContext method. +// +// QueryRowContext executes the given query as implemented by database/sql.QueryRowContext. 
+type QueryRowerContext interface { + QueryRowContext(ctx context.Context, query string, args ...interface{}) RowScanner +} + +// RunnerContext groups the Runner interface, along with the Context versions of each of +// its methods +type RunnerContext interface { + Runner + QueryerContext + QueryRowerContext + ExecerContext +} + +// WrapStdSqlCtx wraps a type implementing the standard SQL interface plus the context +// versions of the methods with methods that squirrel expects. +func WrapStdSqlCtx(stdSqlCtx StdSqlCtx) RunnerContext { + return &stdsqlCtxRunner{stdSqlCtx} +} + +// StdSqlCtx encompasses the standard methods of the *sql.DB type, along with the Context +// versions of those methods, and other types that wrap these methods. +type StdSqlCtx interface { + StdSql + QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) + QueryRowContext(context.Context, string, ...interface{}) *sql.Row + ExecContext(context.Context, string, ...interface{}) (sql.Result, error) +} + +type stdsqlCtxRunner struct { + StdSqlCtx +} + +func (r *stdsqlCtxRunner) QueryRow(query string, args ...interface{}) RowScanner { + return r.StdSqlCtx.QueryRow(query, args...) +} + +func (r *stdsqlCtxRunner) QueryRowContext(ctx context.Context, query string, args ...interface{}) RowScanner { + return r.StdSqlCtx.QueryRowContext(ctx, query, args...) +} + +// ExecContextWith ExecContexts the SQL returned by s with db. +func ExecContextWith(ctx context.Context, db ExecerContext, s Sqlizer) (res sql.Result, err error) { + query, args, err := s.ToSql() + if err != nil { + return + } + return db.ExecContext(ctx, query, args...) +} + +// QueryContextWith QueryContexts the SQL returned by s with db. +func QueryContextWith(ctx context.Context, db QueryerContext, s Sqlizer) (rows *sql.Rows, err error) { + query, args, err := s.ToSql() + if err != nil { + return + } + return db.QueryContext(ctx, query, args...) +} + +// QueryRowContextWith QueryRowContexts the SQL returned by s with db. 
+func QueryRowContextWith(ctx context.Context, db QueryRowerContext, s Sqlizer) RowScanner { + query, args, err := s.ToSql() + return &Row{RowScanner: db.QueryRowContext(ctx, query, args...), err: err} +} diff --git a/vendor/github.com/mattermost/squirrel/statement.go b/vendor/github.com/mattermost/squirrel/statement.go new file mode 100644 index 00000000..1c481be2 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/statement.go @@ -0,0 +1,104 @@ +package squirrel + +import "github.com/lann/builder" + +// StatementBuilderType is the type of StatementBuilder. +type StatementBuilderType builder.Builder + +// Select returns a SelectBuilder for this StatementBuilderType. +func (b StatementBuilderType) Select(columns ...string) SelectBuilder { + return SelectBuilder(b).Columns(columns...) +} + +// Insert returns a InsertBuilder for this StatementBuilderType. +func (b StatementBuilderType) Insert(into string) InsertBuilder { + return InsertBuilder(b).Into(into) +} + +// Replace returns a InsertBuilder for this StatementBuilderType with the +// statement keyword set to "REPLACE". +func (b StatementBuilderType) Replace(into string) InsertBuilder { + return InsertBuilder(b).statementKeyword("REPLACE").Into(into) +} + +// Update returns a UpdateBuilder for this StatementBuilderType. +func (b StatementBuilderType) Update(tables ...string) UpdateBuilder { + return UpdateBuilder(b).Table(tables...) +} + +// Delete returns a DeleteBuilder for this StatementBuilderType. +func (b StatementBuilderType) Delete(from string) DeleteBuilder { + return DeleteBuilder(b).From(from) +} + +// PlaceholderFormat sets the PlaceholderFormat field for any child builders. +func (b StatementBuilderType) PlaceholderFormat(f PlaceholderFormat) StatementBuilderType { + return builder.Set(b, "PlaceholderFormat", f).(StatementBuilderType) +} + +// RunWith sets the RunWith field for any child builders. 
+func (b StatementBuilderType) RunWith(runner BaseRunner) StatementBuilderType { + return setRunWith(b, runner).(StatementBuilderType) +} + +// Where adds WHERE expressions to the query. +// +// See SelectBuilder.Where for more information. +func (b StatementBuilderType) Where(pred interface{}, args ...interface{}) StatementBuilderType { + return builder.Append(b, "WhereParts", newWherePart(pred, args...)).(StatementBuilderType) +} + +// StatementBuilder is a parent builder for other builders, e.g. SelectBuilder. +var StatementBuilder = StatementBuilderType(builder.EmptyBuilder).PlaceholderFormat(Question) + +// Select returns a new SelectBuilder, optionally setting some result columns. +// +// See SelectBuilder.Columns. +func Select(columns ...string) SelectBuilder { + return StatementBuilder.Select(columns...) +} + +// Insert returns a new InsertBuilder with the given table name. +// +// See InsertBuilder.Into. +func Insert(into string) InsertBuilder { + return StatementBuilder.Insert(into) +} + +// Replace returns a new InsertBuilder with the statement keyword set to +// "REPLACE" and with the given table name. +// +// See InsertBuilder.Into. +func Replace(into string) InsertBuilder { + return StatementBuilder.Replace(into) +} + +// Update returns a new UpdateBuilder with the given table name. +// +// See UpdateBuilder.Table. +func Update(tables ...string) UpdateBuilder { + return StatementBuilder.Update(tables...) +} + +// Delete returns a new DeleteBuilder with the given table name. +// +// See DeleteBuilder.Table. 
+func Delete(from string) DeleteBuilder { + return StatementBuilder.Delete(from) +} + +// Case returns a new CaseBuilder +// "what" represents case value +func Case(what ...interface{}) CaseBuilder { + b := CaseBuilder(builder.EmptyBuilder) + + switch len(what) { + case 0: + case 1: + b = b.what(what[0]) + default: + b = b.what(newPart(what[0], what[1:]...)) + + } + return b +} diff --git a/vendor/github.com/mattermost/squirrel/stmtcacher.go b/vendor/github.com/mattermost/squirrel/stmtcacher.go new file mode 100644 index 00000000..5bf267a1 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/stmtcacher.go @@ -0,0 +1,121 @@ +package squirrel + +import ( + "database/sql" + "fmt" + "sync" +) + +// Prepareer is the interface that wraps the Prepare method. +// +// Prepare executes the given query as implemented by database/sql.Prepare. +type Preparer interface { + Prepare(query string) (*sql.Stmt, error) +} + +// DBProxy groups the Execer, Queryer, QueryRower, and Preparer interfaces. +type DBProxy interface { + Execer + Queryer + QueryRower + Preparer +} + +// NOTE: NewStmtCache is defined in stmtcacher_ctx.go (Go >= 1.8) or stmtcacher_noctx.go (Go < 1.8). + +// StmtCache wraps and delegates down to a Preparer type +// +// It also automatically prepares all statements sent to the underlying Preparer calls +// for Exec, Query and QueryRow and caches the returns *sql.Stmt using the provided +// query as the key. So that it can be automatically re-used. 
+type StmtCache struct { + prep Preparer + cache map[string]*sql.Stmt + mu sync.Mutex +} + +// Prepare delegates down to the underlying Preparer and caches the result +// using the provided query as a key +func (sc *StmtCache) Prepare(query string) (*sql.Stmt, error) { + sc.mu.Lock() + defer sc.mu.Unlock() + + stmt, ok := sc.cache[query] + if ok { + return stmt, nil + } + stmt, err := sc.prep.Prepare(query) + if err == nil { + sc.cache[query] = stmt + } + return stmt, err +} + +// Exec delegates down to the underlying Preparer using a prepared statement +func (sc *StmtCache) Exec(query string, args ...interface{}) (res sql.Result, err error) { + stmt, err := sc.Prepare(query) + if err != nil { + return + } + return stmt.Exec(args...) +} + +// Query delegates down to the underlying Preparer using a prepared statement +func (sc *StmtCache) Query(query string, args ...interface{}) (rows *sql.Rows, err error) { + stmt, err := sc.Prepare(query) + if err != nil { + return + } + return stmt.Query(args...) +} + +// QueryRow delegates down to the underlying Preparer using a prepared statement +func (sc *StmtCache) QueryRow(query string, args ...interface{}) RowScanner { + stmt, err := sc.Prepare(query) + if err != nil { + return &Row{err: err} + } + return stmt.QueryRow(args...) 
+} + +// Clear removes and closes all the currently cached prepared statements +func (sc *StmtCache) Clear() (err error) { + sc.mu.Lock() + defer sc.mu.Unlock() + + for key, stmt := range sc.cache { + delete(sc.cache, key) + + if stmt == nil { + continue + } + + if cerr := stmt.Close(); cerr != nil { + err = cerr + } + } + + if err != nil { + return fmt.Errorf("one or more Stmt.Close failed; last error: %v", err) + } + + return +} + +type DBProxyBeginner interface { + DBProxy + Begin() (*sql.Tx, error) +} + +type stmtCacheProxy struct { + DBProxy + db *sql.DB +} + +func NewStmtCacheProxy(db *sql.DB) DBProxyBeginner { + return &stmtCacheProxy{DBProxy: NewStmtCache(db), db: db} +} + +func (sp *stmtCacheProxy) Begin() (*sql.Tx, error) { + return sp.db.Begin() +} diff --git a/vendor/github.com/mattermost/squirrel/stmtcacher_ctx.go b/vendor/github.com/mattermost/squirrel/stmtcacher_ctx.go new file mode 100644 index 00000000..53603cf4 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/stmtcacher_ctx.go @@ -0,0 +1,86 @@ +// +build go1.8 + +package squirrel + +import ( + "context" + "database/sql" +) + +// PrepareerContext is the interface that wraps the Prepare and PrepareContext methods. +// +// Prepare executes the given query as implemented by database/sql.Prepare. +// PrepareContext executes the given query as implemented by database/sql.PrepareContext. +type PreparerContext interface { + Preparer + PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) +} + +// DBProxyContext groups the Execer, Queryer, QueryRower and PreparerContext interfaces. +type DBProxyContext interface { + Execer + Queryer + QueryRower + PreparerContext +} + +// NewStmtCache returns a *StmtCache wrapping a PreparerContext that caches Prepared Stmts. +// +// Stmts are cached based on the string value of their queries. 
+func NewStmtCache(prep PreparerContext) *StmtCache { + return &StmtCache{prep: prep, cache: make(map[string]*sql.Stmt)} +} + +// NewStmtCacher is deprecated +// +// Use NewStmtCache instead +func NewStmtCacher(prep PreparerContext) DBProxyContext { + return NewStmtCache(prep) +} + +// PrepareContext delegates down to the underlying PreparerContext and caches the result +// using the provided query as a key +func (sc *StmtCache) PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) { + ctxPrep, ok := sc.prep.(PreparerContext) + if !ok { + return nil, NoContextSupport + } + sc.mu.Lock() + defer sc.mu.Unlock() + stmt, ok := sc.cache[query] + if ok { + return stmt, nil + } + stmt, err := ctxPrep.PrepareContext(ctx, query) + if err == nil { + sc.cache[query] = stmt + } + return stmt, err +} + +// ExecContext delegates down to the underlying PreparerContext using a prepared statement +func (sc *StmtCache) ExecContext(ctx context.Context, query string, args ...interface{}) (res sql.Result, err error) { + stmt, err := sc.PrepareContext(ctx, query) + if err != nil { + return + } + return stmt.ExecContext(ctx, args...) +} + +// QueryContext delegates down to the underlying PreparerContext using a prepared statement +func (sc *StmtCache) QueryContext(ctx context.Context, query string, args ...interface{}) (rows *sql.Rows, err error) { + stmt, err := sc.PrepareContext(ctx, query) + if err != nil { + return + } + return stmt.QueryContext(ctx, args...) +} + +// QueryRowContext delegates down to the underlying PreparerContext using a prepared statement +func (sc *StmtCache) QueryRowContext(ctx context.Context, query string, args ...interface{}) RowScanner { + stmt, err := sc.PrepareContext(ctx, query) + if err != nil { + return &Row{err: err} + } + return stmt.QueryRowContext(ctx, args...) 
+} diff --git a/vendor/github.com/mattermost/squirrel/stmtcacher_noctx.go b/vendor/github.com/mattermost/squirrel/stmtcacher_noctx.go new file mode 100644 index 00000000..deac9677 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/stmtcacher_noctx.go @@ -0,0 +1,21 @@ +// +build !go1.8 + +package squirrel + +import ( + "database/sql" +) + +// NewStmtCacher returns a DBProxy wrapping prep that caches Prepared Stmts. +// +// Stmts are cached based on the string value of their queries. +func NewStmtCache(prep Preparer) *StmtCache { + return &StmtCacher{prep: prep, cache: make(map[string]*sql.Stmt)} +} + +// NewStmtCacher is deprecated +// +// Use NewStmtCache instead +func NewStmtCacher(prep Preparer) DBProxy { + return NewStmtCache(prep) +} diff --git a/vendor/github.com/mattermost/squirrel/update.go b/vendor/github.com/mattermost/squirrel/update.go new file mode 100644 index 00000000..86f9c9aa --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/update.go @@ -0,0 +1,296 @@ +package squirrel + +import ( + "bytes" + "database/sql" + "fmt" + "sort" + "strings" + + "github.com/lann/builder" +) + +type updateData struct { + PlaceholderFormat PlaceholderFormat + RunWith BaseRunner + Prefixes []Sqlizer + Tables []string + SetClauses []setClause + From []Sqlizer + WhereParts []Sqlizer + OrderBys []string + Limit string + Offset string + Suffixes []Sqlizer +} + +type setClause struct { + column string + value interface{} +} + +func (d *updateData) Exec() (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return ExecWith(d.RunWith, d) +} + +func (d *updateData) Query() (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + return QueryWith(d.RunWith, d) +} + +func (d *updateData) QueryRow() RowScanner { + if d.RunWith == nil { + return &Row{err: RunnerNotSet} + } + queryRower, ok := d.RunWith.(QueryRower) + if !ok { + return &Row{err: RunnerNotQueryRunner} + } + return QueryRowWith(queryRower, d) +} + +func (d 
*updateData) ToSql() (sqlStr string, args []interface{}, err error) { + if len(d.Tables) == 0 { + err = fmt.Errorf("update statements must specify a table") + return + } + if len(d.SetClauses) == 0 { + err = fmt.Errorf("update statements must have at least one Set clause") + return + } + + sql := &bytes.Buffer{} + + if len(d.Prefixes) > 0 { + args, err = appendToSql(d.Prefixes, sql, " ", args) + if err != nil { + return + } + + sql.WriteString(" ") + } + + sql.WriteString("UPDATE ") + sql.WriteString(strings.Join(d.Tables, ", ")) + + sql.WriteString(" SET ") + setSqls := make([]string, len(d.SetClauses)) + for i, setClause := range d.SetClauses { + var valSql string + if vs, ok := setClause.value.(Sqlizer); ok { + vsql, vargs, err := vs.ToSql() + if err != nil { + return "", nil, err + } + if _, ok := vs.(SelectBuilder); ok { + valSql = fmt.Sprintf("(%s)", vsql) + } else { + valSql = vsql + } + args = append(args, vargs...) + } else { + valSql = "?" + args = append(args, setClause.value) + } + setSqls[i] = fmt.Sprintf("%s = %s", setClause.column, valSql) + } + sql.WriteString(strings.Join(setSqls, ", ")) + + if len(d.From) > 0 { + sql.WriteString(" FROM ") + args, err = appendToSql(d.From, sql, ", ", args) + if err != nil { + return + } + } + + if len(d.WhereParts) > 0 { + sql.WriteString(" WHERE ") + args, err = appendToSql(d.WhereParts, sql, " AND ", args) + if err != nil { + return + } + } + + if len(d.OrderBys) > 0 { + sql.WriteString(" ORDER BY ") + sql.WriteString(strings.Join(d.OrderBys, ", ")) + } + + if len(d.Limit) > 0 { + sql.WriteString(" LIMIT ") + sql.WriteString(d.Limit) + } + + if len(d.Offset) > 0 { + sql.WriteString(" OFFSET ") + sql.WriteString(d.Offset) + } + + if len(d.Suffixes) > 0 { + sql.WriteString(" ") + args, err = appendToSql(d.Suffixes, sql, " ", args) + if err != nil { + return + } + } + + sqlStr, err = d.PlaceholderFormat.ReplacePlaceholders(sql.String()) + return +} + +// Builder + +// UpdateBuilder builds SQL UPDATE statements. 
+type UpdateBuilder builder.Builder + +func init() { + builder.Register(UpdateBuilder{}, updateData{}) +} + +// Format methods + +// PlaceholderFormat sets PlaceholderFormat (e.g. Question or Dollar) for the +// query. +func (b UpdateBuilder) PlaceholderFormat(f PlaceholderFormat) UpdateBuilder { + return builder.Set(b, "PlaceholderFormat", f).(UpdateBuilder) +} + +// Runner methods + +// RunWith sets a Runner (like database/sql.DB) to be used with e.g. Exec. +func (b UpdateBuilder) RunWith(runner BaseRunner) UpdateBuilder { + return setRunWith(b, runner).(UpdateBuilder) +} + +// Exec builds and Execs the query with the Runner set by RunWith. +func (b UpdateBuilder) Exec() (sql.Result, error) { + data := builder.GetStruct(b).(updateData) + return data.Exec() +} + +func (b UpdateBuilder) Query() (*sql.Rows, error) { + data := builder.GetStruct(b).(updateData) + return data.Query() +} + +func (b UpdateBuilder) QueryRow() RowScanner { + data := builder.GetStruct(b).(updateData) + return data.QueryRow() +} + +func (b UpdateBuilder) Scan(dest ...interface{}) error { + return b.QueryRow().Scan(dest...) +} + +// SQL methods + +// ToSql builds the query into a SQL string and bound args. +func (b UpdateBuilder) ToSql() (string, []interface{}, error) { + data := builder.GetStruct(b).(updateData) + return data.ToSql() +} + +// MustSql builds the query into a SQL string and bound args. +// It panics if there are any errors. 
+func (b UpdateBuilder) MustSql() (string, []interface{}) { + sql, args, err := b.ToSql() + if err != nil { + panic(err) + } + return sql, args +} + +// Prefix adds an expression to the beginning of the query +func (b UpdateBuilder) Prefix(sql string, args ...interface{}) UpdateBuilder { + return b.PrefixExpr(Expr(sql, args...)) +} + +// PrefixExpr adds an expression to the very beginning of the query +func (b UpdateBuilder) PrefixExpr(expr Sqlizer) UpdateBuilder { + return builder.Append(b, "Prefixes", expr).(UpdateBuilder) +} + +// Table sets the table to be updated. +// Additional tables are used with supporting databases to implicitly join. +func (b UpdateBuilder) Table(tables ...string) UpdateBuilder { + nonEmptyTables := make([]string, 0, len(tables)) + for _, table := range tables { + if table != "" { + nonEmptyTables = append(nonEmptyTables, table) + } + } + + return builder.Set(b, "Tables", nonEmptyTables).(UpdateBuilder) +} + +// Set adds SET clauses to the query. +func (b UpdateBuilder) Set(column string, value interface{}) UpdateBuilder { + return builder.Append(b, "SetClauses", setClause{column: column, value: value}).(UpdateBuilder) +} + +// SetMap is a convenience method which calls .Set for each key/value pair in clauses. +func (b UpdateBuilder) SetMap(clauses map[string]interface{}) UpdateBuilder { + keys := make([]string, len(clauses)) + i := 0 + for key := range clauses { + keys[i] = key + i++ + } + sort.Strings(keys) + for _, key := range keys { + val, _ := clauses[key] + b = b.Set(key, val) + } + return b +} + +// From adds FROM clause to the query +// FROM is valid construct in postgresql only. +func (b UpdateBuilder) From(from string) UpdateBuilder { + return builder.Append(b, "From", newPart(from)).(UpdateBuilder) +} + +// FromSelect sets a subquery into the FROM clause of the query. +func (b UpdateBuilder) FromSelect(from SelectBuilder, alias string) UpdateBuilder { + // Prevent misnumbered parameters in nested selects (#183). 
+ from = from.PlaceholderFormat(Question) + return builder.Append(b, "From", Alias(from, alias)).(UpdateBuilder) +} + +// Where adds WHERE expressions to the query. +// +// See SelectBuilder.Where for more information. +func (b UpdateBuilder) Where(pred interface{}, args ...interface{}) UpdateBuilder { + return builder.Append(b, "WhereParts", newWherePart(pred, args...)).(UpdateBuilder) +} + +// OrderBy adds ORDER BY expressions to the query. +func (b UpdateBuilder) OrderBy(orderBys ...string) UpdateBuilder { + return builder.Extend(b, "OrderBys", orderBys).(UpdateBuilder) +} + +// Limit sets a LIMIT clause on the query. +func (b UpdateBuilder) Limit(limit uint64) UpdateBuilder { + return builder.Set(b, "Limit", fmt.Sprintf("%d", limit)).(UpdateBuilder) +} + +// Offset sets a OFFSET clause on the query. +func (b UpdateBuilder) Offset(offset uint64) UpdateBuilder { + return builder.Set(b, "Offset", fmt.Sprintf("%d", offset)).(UpdateBuilder) +} + +// Suffix adds an expression to the end of the query +func (b UpdateBuilder) Suffix(sql string, args ...interface{}) UpdateBuilder { + return b.SuffixExpr(Expr(sql, args...)) +} + +// SuffixExpr adds an expression to the end of the query +func (b UpdateBuilder) SuffixExpr(expr Sqlizer) UpdateBuilder { + return builder.Append(b, "Suffixes", expr).(UpdateBuilder) +} diff --git a/vendor/github.com/mattermost/squirrel/update_ctx.go b/vendor/github.com/mattermost/squirrel/update_ctx.go new file mode 100644 index 00000000..ad479f96 --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/update_ctx.go @@ -0,0 +1,69 @@ +// +build go1.8 + +package squirrel + +import ( + "context" + "database/sql" + + "github.com/lann/builder" +) + +func (d *updateData) ExecContext(ctx context.Context) (sql.Result, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(ExecerContext) + if !ok { + return nil, NoContextSupport + } + return ExecContextWith(ctx, ctxRunner, d) +} + +func (d *updateData) 
QueryContext(ctx context.Context) (*sql.Rows, error) { + if d.RunWith == nil { + return nil, RunnerNotSet + } + ctxRunner, ok := d.RunWith.(QueryerContext) + if !ok { + return nil, NoContextSupport + } + return QueryContextWith(ctx, ctxRunner, d) +} + +func (d *updateData) QueryRowContext(ctx context.Context) RowScanner { + if d.RunWith == nil { + return &Row{err: RunnerNotSet} + } + queryRower, ok := d.RunWith.(QueryRowerContext) + if !ok { + if _, ok := d.RunWith.(QueryerContext); !ok { + return &Row{err: RunnerNotQueryRunner} + } + return &Row{err: NoContextSupport} + } + return QueryRowContextWith(ctx, queryRower, d) +} + +// ExecContext builds and ExecContexts the query with the Runner set by RunWith. +func (b UpdateBuilder) ExecContext(ctx context.Context) (sql.Result, error) { + data := builder.GetStruct(b).(updateData) + return data.ExecContext(ctx) +} + +// QueryContext builds and QueryContexts the query with the Runner set by RunWith. +func (b UpdateBuilder) QueryContext(ctx context.Context) (*sql.Rows, error) { + data := builder.GetStruct(b).(updateData) + return data.QueryContext(ctx) +} + +// QueryRowContext builds and QueryRowContexts the query with the Runner set by RunWith. +func (b UpdateBuilder) QueryRowContext(ctx context.Context) RowScanner { + data := builder.GetStruct(b).(updateData) + return data.QueryRowContext(ctx) +} + +// ScanContext is a shortcut for QueryRowContext().Scan. +func (b UpdateBuilder) ScanContext(ctx context.Context, dest ...interface{}) error { + return b.QueryRowContext(ctx).Scan(dest...) 
+} diff --git a/vendor/github.com/mattermost/squirrel/where.go b/vendor/github.com/mattermost/squirrel/where.go new file mode 100644 index 00000000..976b63ac --- /dev/null +++ b/vendor/github.com/mattermost/squirrel/where.go @@ -0,0 +1,30 @@ +package squirrel + +import ( + "fmt" +) + +type wherePart part + +func newWherePart(pred interface{}, args ...interface{}) Sqlizer { + return &wherePart{pred: pred, args: args} +} + +func (p wherePart) ToSql() (sql string, args []interface{}, err error) { + switch pred := p.pred.(type) { + case nil: + // no-op + case rawSqlizer: + return pred.toSqlRaw() + case Sqlizer: + return pred.ToSql() + case map[string]interface{}: + return Eq(pred).ToSql() + case string: + sql = pred + args = p.args + default: + err = fmt.Errorf("expected string-keyed map or string, not %T", pred) + } + return +} diff --git a/vendor/github.com/mattermost/xml-roundtrip-validator/LICENSE.txt b/vendor/github.com/mattermost/xml-roundtrip-validator/LICENSE.txt new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/vendor/github.com/mattermost/xml-roundtrip-validator/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/mattermost/xml-roundtrip-validator/README.md b/vendor/github.com/mattermost/xml-roundtrip-validator/README.md new file mode 100644 index 00000000..ec81a2a6 --- /dev/null +++ b/vendor/github.com/mattermost/xml-roundtrip-validator/README.md @@ -0,0 +1,73 @@ +# xml-roundtrip-validator + +The Go module `github.com/mattermost/xml-roundtrip-validator` implements mitigations for multiple security issues in Go's `encoding/xml`. Applications that use `encoding/xml` for security-critical operations, such as XML signature validation and SAML, may use the `Validate` and `ValidateAll` functions to avoid impact from malicious XML inputs. 
+ +## Usage + +### Validate + +```Go +import ( + "strings" + + xrv "github.com/mattermost/xml-roundtrip-validator" +) + +func DoStuffWithXML(input string) { + if err := xrv.Validate(strings.NewReader(input)); err != nil { + panic(err) + } + // validation succeeded, input is safe + actuallyDoStuffWithXML(input) +} +``` + +### ValidateAll + +```Go +import ( + "strings" + + xrv "github.com/mattermost/xml-roundtrip-validator" +) + +func DoStuffWithXML(input string) { + if errs := xrv.ValidateAll(strings.NewReader(input)); len(errs) != 0 { + for err := range errs { + // here you can log each error individually if you like + } + return + } + // validation succeeded, input is safe + actuallyDoStuffWithXML(input) +} +``` + +### CLI + +Compiling: + +``` +$ go build cmd/xrv.go +``` + +Running: + +``` +$ ./xrv good.xml +Document validated without errors +$ ./xrv bad.xml +validator: in token starting at 2:5: roundtrip error: expected {{ :Element} []}, observed {{ Element} []} +$ ./xrv -all bad.xml +validator: in token starting at 2:5: roundtrip error: expected {{ :Element} []}, observed {{ Element} []} +validator: in token starting at 3:5: roundtrip error: expected {{ Element} [{{ :attr} z}]}, observed {{ Element} [{{ attr} z}]} +``` + +## Go vulnerabilities addressed + +Descriptions of the Go vulnerabilities addressed by this module can be found in the advisories directory. 
Specifically, the issues addressed are: + + - [Element namespace prefix instability](./advisories/unstable-elements.md) + - [Attribute namespace prefix instability](./advisories/unstable-attributes.md) + - [Directive comment instability](./advisories/unstable-directives.md) + - Any other similar roundtrip issues we may not know about diff --git a/vendor/github.com/mattermost/xml-roundtrip-validator/SECURITY.md b/vendor/github.com/mattermost/xml-roundtrip-validator/SECURITY.md new file mode 100644 index 00000000..4cb6c58d --- /dev/null +++ b/vendor/github.com/mattermost/xml-roundtrip-validator/SECURITY.md @@ -0,0 +1,25 @@ +Security +======== + +Safety and data security is of the utmost priority for the Mattermost community. If you are a security researcher and have discovered a security vulnerability in our codebase, we would appreciate your help in disclosing it to us in a responsible manner. + +Reporting security issues +------------------------- + +**Please do not use GitHub issues for security-sensitive communication.** + +Security issues in the community test server, any of the open source codebases maintained by Mattermost, or any of our commercial offerings should be reported via email to [responsibledisclosure@mattermost.com](mailto:responsibledisclosure@mattermost.com). Mattermost is committed to working together with researchers and keeping them updated throughout the patching process. Researchers who responsibly report valid security issues will be publicly credited for their efforts (if they so choose). + +For a more detailed description of the disclosure process and a list of researchers who have previously contributed to the disclosure program, see [Report a Security Vulnerability](https://mattermost.com/security-vulnerability-report/) on the Mattermost website. + +Security updates +---------------- + +Mattermost has a mandatory upgrade policy, and updates are only provided for the latest 3 releases and the current Extended Support Release (ESR). 
Critical updates are delivered as dot releases. Details on security updates are announced 30 days after the availability of the update. + +For more details about the security content of past releases, see the [Security Updates](https://mattermost.com/security-updates/) page on the Mattermost website. For timely notifications about new security updates, subscribe to the [Security Bulletins Mailing List](https://about.mattermost.com/security-bulletin). + +Contributing to this policy +--------------------------- + +If you have feedback or suggestions on improving this policy document, please [create an issue](https://github.com/mattermost/mattermost-server/issues/new). diff --git a/vendor/github.com/mattermost/xml-roundtrip-validator/validator.go b/vendor/github.com/mattermost/xml-roundtrip-validator/validator.go new file mode 100644 index 00000000..523659dc --- /dev/null +++ b/vendor/github.com/mattermost/xml-roundtrip-validator/validator.go @@ -0,0 +1,292 @@ +package validator + +import ( + "bytes" + "encoding/xml" + "errors" + "fmt" + "io" +) + +// XMLRoundtripError is returned when a round-trip token doesn't match the original +type XMLRoundtripError struct { + Expected, Observed xml.Token + Overflow []byte +} + +func (err XMLRoundtripError) Error() string { + if len(err.Overflow) == 0 { + return fmt.Sprintf("roundtrip error: expected %v, observed %v", err.Expected, err.Observed) + } + return fmt.Sprintf("roundtrip error: unexpected overflow after token: %s", err.Overflow) +} + +// XMLValidationError is returned when validating an XML document fails +type XMLValidationError struct { + Start, End, Line, Column int64 + err error +} + +func (err XMLValidationError) Error() string { + return fmt.Sprintf("validator: in token starting at %d:%d: %s", err.Line, err.Column, err.err.Error()) +} + +func (err XMLValidationError) Unwrap() error { + return err.err +} + +// Validate makes sure the given XML bytes survive round trips through encoding/xml without mutations +func 
Validate(xmlReader io.Reader) error { + xmlBuffer := &bytes.Buffer{} + xmlReader = &byteReader{io.TeeReader(xmlReader, xmlBuffer)} + decoder := xml.NewDecoder(xmlReader) + decoder.Strict = false + decoder.CharsetReader = func(charset string, input io.Reader) (io.Reader, error) { return input, nil } + offset := int64(0) + for { + token, err := decoder.RawToken() + if err == io.EOF { + return nil + } else if err != nil { + return err + } + if err := CheckToken(token); err != nil { + xmlBytes := xmlBuffer.Bytes() + line := bytes.Count(xmlBytes[0:offset], []byte{'\n'}) + 1 + lineStart := int64(bytes.LastIndexByte(xmlBytes[0:offset], '\n')) + 1 + column := offset - lineStart + 1 + return XMLValidationError{ + Start: offset, + End: decoder.InputOffset(), + Line: int64(line), + Column: column, + err: err, + } + } + offset = decoder.InputOffset() + } +} + +// ValidateAll is like Validate, but instead of returning after the first error, +// it accumulates errors and validates the entire document +func ValidateAll(xmlReader io.Reader) []error { + xmlBuffer := &bytes.Buffer{} + xmlReader = io.TeeReader(xmlReader, xmlBuffer) + errs := []error{} + offset := int64(0) + line := int64(1) + column := int64(1) + for { + err := Validate(xmlReader) + if err == nil { + // reached the end with no additional errors + break + } + validationError := XMLValidationError{} + if errors.As(err, &validationError) { + // validation errors contain line numbers and offsets, but + // these offsets are based on the offset where Validate + // was called, so they need to be adjusted to accordingly + validationError.Start += offset + validationError.End += offset + if validationError.Line == 1 { + validationError.Column += column - 1 + } + validationError.Line += line - 1 + errs = append(errs, validationError) + xmlBytes := xmlBuffer.Bytes() + offset += int64(len(xmlBytes)) + newLines := int64(bytes.Count(xmlBytes, []byte("\n"))) + line += newLines + if newLines > 0 { + column = int64(len(xmlBytes) - 
bytes.LastIndex(xmlBytes, []byte("\n"))) + } else { + column += int64(len(xmlBytes)) + } + xmlBuffer.Reset() + } else { + // this was not a validation error, but likely + // completely unparseable XML instead; no point + // in trying to continue + errs = append(errs, err) + break + } + } + return errs +} + +// bufio implements a ByteReader but we explicitly don't want any buffering +type byteReader struct { + r io.Reader +} + +func (r *byteReader) ReadByte() (byte, error) { + var p [1]byte + n, err := r.r.Read(p[:]) + + // The doc for the io.ByteReader interface states: + // If ReadByte returns an error, no input byte was consumed, and the returned byte value is undefined. + // So if a byte is actually extracted from the reader, and we want to return it, we mustn't return the error. + if n > 0 { + // this byteReader is only used in the context of the Validate() function, + // we deliberately choose to completely ignore the error in this case. + // return the byte extracted from the reader + return p[0], nil + } + + return 0, err +} + +func (r *byteReader) Read(p []byte) (int, error) { + return r.r.Read(p) +} + +// CheckToken computes a round trip for a given xml.Token and returns an +// error if the newly calculated token differs from the original +func CheckToken(before xml.Token) error { + buffer := &bytes.Buffer{} + encoder := xml.NewEncoder(buffer) + + switch t := before.(type) { + case xml.EndElement: + // xml.Encoder expects matching StartElements for all EndElements + if err := encoder.EncodeToken(xml.StartElement{Name: t.Name}); err != nil { + return err + } + } + + if err := encoder.EncodeToken(before); err != nil { + return err + } + if err := encoder.Flush(); err != nil { + return err + } + encoded := buffer.Bytes() + decoder := xml.NewDecoder(bytes.NewReader(encoded)) + decoder.CharsetReader = func(charset string, input io.Reader) (io.Reader, error) { return input, nil } + + switch before.(type) { + case xml.EndElement: + // throw away the StartElement 
we added above + if _, err := decoder.RawToken(); err != nil { + return err + } + } + + after, err := decoder.RawToken() + if err != nil { + return err + } + + if !tokenEquals(before, after) { + return XMLRoundtripError{before, after, nil} + } + offset := decoder.InputOffset() + if offset != int64(len(encoded)) { + // this is likely unreachable, but just in case + return XMLRoundtripError{before, after, encoded[offset:]} + } + return nil +} + +func tokenEquals(before, after xml.Token) bool { + switch t1 := before.(type) { + + case xml.CharData: + t2, ok := after.(xml.CharData) + if !ok { + return false + } + return bytes.Equal(t1, t2) + + case xml.Comment: + t2, ok := after.(xml.Comment) + if !ok { + return false + } + return bytes.Equal(t1, t2) + + case xml.Directive: + t2, ok := after.(xml.Directive) + if !ok { + return false + } + return bytes.Equal(t1, t2) + + case xml.EndElement: + t2, ok := after.(xml.EndElement) + if !ok { + return false + } + // local name should equal; namespace prefixes get erased + return t1.Name.Local == t2.Name.Local && t2.Name.Space == "" + + case xml.ProcInst: + t2, ok := after.(xml.ProcInst) + if !ok { + return false + } + return t1.Target == t2.Target && bytes.Equal(t1.Inst, t2.Inst) + + case xml.StartElement: + t2, ok := after.(xml.StartElement) + if !ok { + return false + } + // encoding/xml messes up namespace prefixes on both tag and attribute names; + // they need adjusting to make the comparison possible + fixNamespacePrefixes(&t1, &t2) + if t1.Name != t2.Name { + return false + } + if len(t1.Attr) != len(t2.Attr) { + return false + } + // after the call to fixNamespacePrefixes, all attributes should match; + // ordering is preserved + for i, attr := range t1.Attr { + if attr != t2.Attr[i] { + return false + } + } + return true + } + return false +} + +func fixNamespacePrefixes(before, after *xml.StartElement) { + // if the after token has more attributes than the before token, + // the round trip likely introduced new xmlns 
attributes + if len(after.Attr) > len(before.Attr) { + + // handle erased tag prefixes; the corresponding xmlns attribute is always the first one + if (before.Name.Space != "" && after.Name.Space == "" && after.Attr[0].Name == xml.Name{Local: "xmlns"}) { + after.Name.Space = after.Attr[0].Value + after.Attr = after.Attr[1:] + } + + // handle attribute prefixes; the xmlns attribute always comes immediately before the prefixed attribute + for len(after.Attr) > len(before.Attr) && len(after.Attr) > 1 { + var xmlns *xml.Attr + i := 1 + for ; i < len(after.Attr); i++ { + if after.Attr[i-1].Name.Space == "xmlns" && after.Attr[i-1].Name.Local == after.Attr[i].Name.Space { + xmlns = &after.Attr[i-1] + break + } + } + if xmlns == nil { + break + } + prefix := xmlns.Name.Local + space := xmlns.Value + copy(after.Attr[i-1:], after.Attr[i:]) + after.Attr = after.Attr[:len(after.Attr)-1] + for j := range after.Attr { + if after.Attr[j].Name.Space == prefix { + after.Attr[j].Name.Space = space + } + } + } + } +}