Documentation ¶
Index ¶
- Constants
- Variables
- func AddMissingSlashToURL(baseUrl *string)
- func CalcTickInterval(numOfWorks int, duration time.Duration) (time.Duration, errors.Error)
- func CallDB(f func(any, ...dal.Clause) errors.Error, x any, clauses ...dal.Clause) errors.Error
- func ConvertStringToTime(timeString string) (t time.Time, err error)
- func Decode(source interface{}, target interface{}, vld *validator.Validate) errors.Error
- func DecodeHook(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error)
- func DecodeMapStruct(input map[string]interface{}, result interface{}, zeroFields bool) errors.Error
- func DecodeStruct(output *viper.Viper, input interface{}, data map[string]interface{}, ...) errors.Error
- func EncodeStruct(input *viper.Viper, output interface{}, tag string) errors.Error
- func ExtractCommitSha(repoPatterns []*regexp.Regexp, commitUrl string) string
- func GetLimitOffset(q url.Values, pageSizeKey, pageKey string) (limit int, offset int)
- func GetRawMessageArrayFromResponse(res *http.Response) ([]json.RawMessage, errors.Error)
- func GetRawMessageDirectFromResponse(res *http.Response) ([]json.RawMessage, errors.Error)
- func GetURIStringPointer(baseUrl string, relativePath string, query url.Values) (*string, errors.Error)
- func Iso8601TimeToTime(iso8601Time *Iso8601Time) *time.Time
- func MakePipelinePlanSubtasks(subtaskMetas []plugin.SubTaskMeta, entities []string) ([]string, errors.Error)
- func NewStatefulApiCollectorForFinalizableEntity(args FinalizableApiCollectorArgs) (plugin.SubTask, errors.Error)
- func RefineIssueRepoCommit(item *crossdomain.IssueRepoCommit, repoPatterns []*regexp.Regexp, ...) *crossdomain.IssueRepoCommit
- func RemoveStartingSlashFromPath(relativePath string) string
- func UnmarshalResponse(res *http.Response, v interface{}) errors.Error
- func UnmarshalResponseXML(res *http.Response, v interface{}) errors.Error
- type AccessToken
- type ApiAsyncClient
- func (apiClient *ApiAsyncClient) DoAsync(method string, path string, query url.Values, body interface{}, ...)
- func (apiClient *ApiAsyncClient) DoGetAsync(path string, query url.Values, header http.Header, ...)
- func (apiClient *ApiAsyncClient) DoPostAsync(path string, query url.Values, body interface{}, header http.Header, ...)
- func (apiClient *ApiAsyncClient) GetMaxRetry() int
- func (apiClient *ApiAsyncClient) GetNumOfWorkers() int
- func (apiClient *ApiAsyncClient) SetMaxRetry(maxRetry int)
- type ApiClient
- func (apiClient *ApiClient) Do(method string, path string, query url.Values, body interface{}, ...) (*http.Response, errors.Error)
- func (apiClient *ApiClient) Get(path string, query url.Values, headers http.Header) (*http.Response, errors.Error)
- func (apiClient *ApiClient) GetAfterFunction() common.ApiClientAfterResponse
- func (apiClient *ApiClient) GetBeforeFunction() common.ApiClientBeforeRequest
- func (apiClient *ApiClient) GetData(name string) interface{}
- func (apiClient *ApiClient) GetEndpoint() string
- func (apiClient *ApiClient) GetHeaders() map[string]string
- func (apiClient *ApiClient) GetTimeout() time.Duration
- func (apiClient *ApiClient) Post(path string, query url.Values, body interface{}, headers http.Header) (*http.Response, errors.Error)
- func (apiClient *ApiClient) SetAfterFunction(callback common.ApiClientAfterResponse)
- func (apiClient *ApiClient) SetBeforeFunction(callback common.ApiClientBeforeRequest)
- func (apiClient *ApiClient) SetContext(ctx gocontext.Context)
- func (apiClient *ApiClient) SetData(name string, data interface{})
- func (apiClient *ApiClient) SetEndpoint(endpoint string)
- func (apiClient *ApiClient) SetHeaders(headers map[string]string)
- func (apiClient *ApiClient) SetLogger(logger log.Logger)
- func (apiClient *ApiClient) SetProxy(proxyUrl string) errors.Error
- func (apiClient *ApiClient) SetTimeout(timeout time.Duration)
- func (apiClient *ApiClient) Setup(endpoint string, headers map[string]string, timeout time.Duration)
- type ApiCollector
- type ApiCollectorArgs
- type ApiCollectorStateManager
- type ApiExtractor
- type ApiExtractorArgs
- type ApiRateLimitCalculator
- type AppKey
- type AsyncResponseHandler
- type BaseConnection
- type BaseRemoteGroupResponse
- type BasicAuth
- type BatchSave
- type BatchSaveDivider
- type CSTTime
- type ConnectionApiHelper
- func (c *ConnectionApiHelper) Create(connection interface{}, input *plugin.ApiResourceInput) errors.Error
- func (c *ConnectionApiHelper) Delete(connection interface{}) (*services.BlueprintProjectPairs, errors.Error)
- func (c *ConnectionApiHelper) First(connection interface{}, params map[string]string) errors.Error
- func (c *ConnectionApiHelper) FirstById(connection interface{}, id uint64) errors.Error
- func (c *ConnectionApiHelper) List(connections interface{}) errors.Error
- func (c *ConnectionApiHelper) Patch(connection interface{}, input *plugin.ApiResourceInput) errors.Error
- type CursorPager
- type DalCursorIterator
- type DataConvertHandler
- type DataConverter
- type DataConverterArgs
- type DataEnrichHandler
- type DataEnricher
- type DataEnricherArgs
- type DateIterator
- type DatePair
- type DateTime
- type DateTimeFormatItem
- type FinalizableApiCollectorArgs
- type FinalizableApiCollectorCommonArgs
- type FinalizableApiCollectorDetailArgs
- type FinalizableApiCollectorListArgs
- type FirstPageTokenOutput
- type GenericScopeApiHelper
- func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) DbHelper() ScopeDatabaseHelper[Conn, Scope, ScopeConfig]
- func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) DeleteScope(input *plugin.ApiResourceInput) (*serviceHelper.BlueprintProjectPairs, errors.Error)
- func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) GetScope(input *plugin.ApiResourceInput) (*ScopeRes[Scope, ScopeConfig], errors.Error)
- func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) GetScopes(input *plugin.ApiResourceInput) ([]*ScopeRes[Scope, ScopeConfig], errors.Error)
- func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) PutScopes(input *plugin.ApiResourceInput, scopes []*Scope) ([]*ScopeRes[Scope, ScopeConfig], errors.Error)
- func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) UpdateScope(input *plugin.ApiResourceInput) (*ScopeRes[Scope, ScopeConfig], errors.Error)
- type GraphqlAsyncClient
- func (apiClient *GraphqlAsyncClient) GetMaxRetry() (int, time.Duration)
- func (apiClient *GraphqlAsyncClient) NextTick(task func() errors.Error, taskErrorChecker func(err error))
- func (apiClient *GraphqlAsyncClient) Query(q interface{}, variables map[string]interface{}) ([]graphql.DataError, error)
- func (apiClient *GraphqlAsyncClient) Release()
- func (apiClient *GraphqlAsyncClient) SetGetRateCost(getRateCost func(q interface{}) int)
- func (apiClient *GraphqlAsyncClient) SetMaxRetry(maxRetry int, waitBeforeRetry time.Duration)
- func (apiClient *GraphqlAsyncClient) Wait()
- type GraphqlAsyncResponseHandler
- type GraphqlCollector
- func (collector *GraphqlCollector) BatchSaveWithOrigin(divider *BatchSaveDivider, results []interface{}, row *RawData) errors.Error
- func (collector *GraphqlCollector) Execute() errors.Error
- func (collector *GraphqlCollector) ExtractExistRawData(divider *BatchSaveDivider) errors.Error
- func (collector *GraphqlCollector) HasError() bool
- type GraphqlCollectorArgs
- type GraphqlQueryPageInfo
- type GraphqlRequestData
- type Iso8601Time
- func (jt Iso8601Time) MarshalJSON() ([]byte, error)
- func (jt *Iso8601Time) Scan(v interface{}) error
- func (jt *Iso8601Time) String() string
- func (jt *Iso8601Time) ToNullableTime() *time.Time
- func (jt *Iso8601Time) ToTime() time.Time
- func (jt *Iso8601Time) UnmarshalJSON(b []byte) error
- func (jt *Iso8601Time) Value() (driver.Value, error)
- type Iterator
- type ListBaseNode
- type MultiAuth
- func (ma *MultiAuth) GetApiAuthenticator(connection plugin.ApiConnection) (plugin.ApiAuthenticator, errors.Error)
- func (ma *MultiAuth) SetupAuthenticationForConnection(connection plugin.ApiConnection, req *http.Request) errors.Error
- func (ma *MultiAuth) ValidateConnection(connection interface{}, v *validator.Validate) errors.Error
- type NoRemoteGroupResponse
- type NoScopeConfig
- type Pager
- type Queue
- func (q *Queue) Clean()
- func (q *Queue) CleanWithOutLock()
- func (q *Queue) Finish(count int64)
- func (q *Queue) GetCount() int64
- func (q *Queue) GetCountWithOutLock() int64
- func (q *Queue) GetCountWithWorkingBlock() int64
- func (q *Queue) Pull() QueueNode
- func (q *Queue) PullWithOutLock() QueueNode
- func (q *Queue) PullWithWorkingBlock() QueueNode
- func (q *Queue) Push(node QueueNode)
- func (q *Queue) PushWithoutLock(node QueueNode)
- type QueueIterator
- type QueueIteratorNode
- type QueueNode
- type RateLimitedApiClient
- type RawData
- type RawDataSubTask
- type RawDataSubTaskArgs
- type ReflectionParameters
- type RegexEnricher
- func (r *RegexEnricher) AddRegexp(patterns ...string) errors.Error
- func (r *RegexEnricher) GetEnrichResult(pattern string, v string, result string) string
- func (r *RegexEnricher) ReturnNameIfMatched(name string, targets ...string) string
- func (r *RegexEnricher) ReturnNameIfOmittedOrMatched(name string, targets ...string) string
- func (r *RegexEnricher) TryAdd(name, pattern string) errors.Error
- type RemoteApiHelper
- func (r *RemoteApiHelper[Conn, Scope, ApiScope, Group]) GetScopesFromRemote(input *plugin.ApiResourceInput, ...) (*plugin.ApiResourceOutput, errors.Error)
- func (r *RemoteApiHelper[Conn, Scope, ApiScope, Group]) PrepareFirstPageToken(customInfo string) (*plugin.ApiResourceOutput, errors.Error)
- func (r *RemoteApiHelper[Conn, Scope, ApiScope, Group]) SearchRemoteScopes(input *plugin.ApiResourceInput, ...) (*plugin.ApiResourceOutput, errors.Error)
- type RemoteQueryData
- type RemoteScopesChild
- type RemoteScopesOutput
- type RequestData
- type RestConnection
- type ScopeApiHelper
- func (c *ScopeApiHelper[Conn, Scope, Tr]) Delete(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (c *ScopeApiHelper[Conn, Scope, Tr]) GetScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (c *ScopeApiHelper[Conn, Scope, Tr]) GetScopeList(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (c *ScopeApiHelper[Conn, Scope, Tr]) Put(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (c *ScopeApiHelper[Conn, Scope, Tr]) Update(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- type ScopeConfigHelper
- func (t ScopeConfigHelper[ScopeConfig]) Create(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (t ScopeConfigHelper[ScopeConfig]) Delete(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (t ScopeConfigHelper[ScopeConfig]) Get(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (t ScopeConfigHelper[ScopeConfig]) List(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- func (t ScopeConfigHelper[ScopeConfig]) Update(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
- type ScopeDatabaseHelper
- type ScopeDatabaseHelperImpl
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) DeleteScope(scope *Scope) errors.Error
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScope(connectionId uint64, scopeId string) (*Scope, errors.Error)
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScopeAndConfig(connectionId uint64, scopeId string) (*Scope, *Tr, errors.Error)
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScopeConfig(ruleId uint64) (*Tr, errors.Error)
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) ListScopeConfigs(ruleIds []uint64) ([]*Tr, errors.Error)
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) ListScopes(input *plugin.ApiResourceInput, connectionId uint64) ([]*Scope, errors.Error)
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) SaveScope(scopes []*Scope) errors.Error
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) UpdateScope(scope *Scope) errors.Error
- func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) VerifyConnection(connectionId uint64) errors.Error
- type ScopeHelperOptions
- type ScopeRefDoc
- type ScopeReq
- type ScopeRes
- type ScopeResDoc
- type SearchRemoteScopesOutput
- type TaskOptions
- type WorkerScheduler
- func (s *WorkerScheduler) GetTickInterval() time.Duration
- func (s *WorkerScheduler) HasError() bool
- func (s *WorkerScheduler) NextTick(task func() errors.Error)
- func (s *WorkerScheduler) Release()
- func (s *WorkerScheduler) Reset(interval time.Duration)
- func (s *WorkerScheduler) SubmitBlocking(task func() errors.Error)
- func (s *WorkerScheduler) WaitAsync() errors.Error
Constants ¶
const TypeGroup string = "group"
const TypeProject string = "scope"
Variables ¶
var DateTimeFormats []DateTimeFormatItem
DateTimeFormats is the list of supported date/time layouts used when parsing time strings
var ErrFinishCollect = errors.Default.New("finish collect")
ErrFinishCollect is an error that, when returned, finishes this collector
var ErrIgnoreAndContinue = errors.Default.New("ignore and continue")
ErrIgnoreAndContinue is an error that should be ignored so processing can continue
var HttpMinStatusRetryCode = http.StatusBadRequest
HttpMinStatusRetryCode is the minimum HTTP status code that triggers a retry
Functions ¶
func AddMissingSlashToURL ¶
func AddMissingSlashToURL(baseUrl *string)
AddMissingSlashToURL appends a trailing slash to the given URL if it is missing
func CalcTickInterval ¶
CalcTickInterval calculates the tick interval needed for the given number of works to be executed within the specified duration
func CallDB ¶
CallDB wraps DB calls with this signature, and handles the case if the struct is wrapped in a models.DynamicTabler.
func ConvertStringToTime ¶
ConvertStringToTime parses a time string into a time.Time
func Decode ¶
Decode decodes `source` into `target`. Pass an optional validator to validate the target.
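For instance, a connection handler might decode a request body into a typed struct and validate it in one call. A minimal sketch, where `MyConnection` is illustrative and `input.Body` is assumed to be the decoded request body map:

func decodeConnection(input *plugin.ApiResourceInput) (*MyConnection, errors.Error) {
    connection := &MyConnection{}
    // decode the request body (a map) into the struct and run validation
    if err := Decode(input.Body, connection, validator.New()); err != nil {
        return nil, err
    }
    return connection, nil
}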
func DecodeHook ¶
func DecodeMapStruct ¶
func DecodeMapStruct(input map[string]interface{}, result interface{}, zeroFields bool) errors.Error
DecodeMapStruct decodes a map into a struct, with support for time.Time and Iso8601Time fields
func DecodeStruct ¶
func DecodeStruct(output *viper.Viper, input interface{}, data map[string]interface{}, tag string) errors.Error
DecodeStruct validates the `input` struct with `validator` and sets it into viper. `tag` identifies the fields when setting config, and fields carrying `tag` take precedence. `input` must be a pointer
func EncodeStruct ¶
EncodeStruct encodes a struct from viper. `tag` identifies the fields when setting config, and fields carrying `tag` take precedence. `output` must be a pointer
func ExtractCommitSha ¶
ExtractCommitSha extracts commit sha from commit url
func GetLimitOffset ¶
GetLimitOffset extracts the page and page size from the query values, then calculates the limit and offset from them
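A small sketch; the key names are whatever the caller uses, and the resulting values are stated as an assumption:

q := url.Values{}
q.Set("page", "3")
q.Set("pageSize", "50")
limit, offset := GetLimitOffset(q, "pageSize", "page")
// presumably limit == 50 and offset == 100 (pages are 1-based)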
func GetRawMessageArrayFromResponse ¶
GetRawMessageArrayFromResponse parses the response body as a JSON array and returns its elements as json.RawMessage
func GetRawMessageDirectFromResponse ¶
GetRawMessageDirectFromResponse returns the whole response body as a single-element json.RawMessage slice
func GetURIStringPointer ¶
func GetURIStringPointer(baseUrl string, relativePath string, query url.Values) (*string, errors.Error)
GetURIStringPointer joins baseUrl, relativePath and query into a full URI and returns it as a string pointer
func Iso8601TimeToTime ¶
func Iso8601TimeToTime(iso8601Time *Iso8601Time) *time.Time
Iso8601TimeToTime converts an *Iso8601Time into a *time.Time
func MakePipelinePlanSubtasks ¶
func MakePipelinePlanSubtasks(subtaskMetas []plugin.SubTaskMeta, entities []string) ([]string, errors.Error)
MakePipelinePlanSubtasks generates subtasks list based on sub-task meta information and entities wanted by user
func NewStatefulApiCollectorForFinalizableEntity ¶
func NewStatefulApiCollectorForFinalizableEntity(args FinalizableApiCollectorArgs) (plugin.SubTask, errors.Error)
NewStatefulApiCollectorForFinalizableEntity aims to add timeFilter/diffSync support for APIs that do NOT support filtering data by the updated date. However, it comes with the following constraints (a configuration sketch follows the list):
- The entity is a short-lived object, or it is likely to become irrelevant: a. CI/CD pipelines are short-lived objects; b. a pull request might take a year to be closed, or never be closed at all, but by then it is likely irrelevant.
- The entity must be Finalizable, meaning no future modifications will happen to it once it enters some sort of `Closed`/`Finished` status.
- The API must fit one of the following traits: a. it supports filtering by Created Date; in this case, you must implement the filtering via the `UrlTemplate`, `Query` or `Header` hook based on the API specification; b. or it supports sorting by Created Date in descending order; in this case, you must use `Concurrency` or `GetNextPageCustomData` instead of `GetTotalPages` (the Undetermined Strategy), since we have to stop the process in the middle.
Assuming the API fits the bill, the strategies can be categorized into:
- Determined Strategy: if the API supports filtering by the Created Date, use the `GetTotalPages` hook
- Undetermined Strategy: if the API supports sorting by the Created Date in descending order and fetching by page number, use the `Concurrency` hook
- Sequential Strategy: if the API supports sorting by the Created Date in descending order but the next page can only be fetched by the Cursor/Token from the previous page, use the `GetNextPageCustomData` hook
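A hedged sketch of the Undetermined Strategy (sorting by Created Date in descending order). The task context, db handle, connection id, repo name, params struct, models, input row type, and endpoints below are illustrative assumptions, the Pager is assumed to expose Page and Size fields, and errors.Default.Wrap is used as a plausible error wrapper:

collector, err := NewStatefulApiCollectorForFinalizableEntity(FinalizableApiCollectorArgs{
    RawDataSubTaskArgs: RawDataSubTaskArgs{
        Ctx:    taskCtx,
        Table:  "_raw_myplugin_pull_requests", // hypothetical raw table
        Params: MyApiParams{ConnectionId: connectionId, Name: repoName},
    },
    ApiClient: apiClient, // a RateLimitedApiClient, e.g. *ApiAsyncClient
    TimeAfter: timeAfter, // nil disables the time filter
    CollectNewRecordsByList: FinalizableApiCollectorListArgs{
        PageSize:    100,
        Concurrency: 10, // Undetermined Strategy: use Concurrency instead of GetTotalPages
        FinalizableApiCollectorCommonArgs: FinalizableApiCollectorCommonArgs{
            UrlTemplate: "repos/{{ .Params.Name }}/pulls", // hypothetical endpoint
            Query: func(reqData *RequestData, createdAfter *time.Time) (url.Values, errors.Error) {
                q := url.Values{}
                q.Set("sort", "created")
                q.Set("direction", "desc")
                q.Set("page", fmt.Sprintf("%v", reqData.Pager.Page))
                q.Set("per_page", fmt.Sprintf("%v", reqData.Pager.Size))
                return q, nil
            },
            ResponseParser: GetRawMessageArrayFromResponse,
        },
        GetCreated: func(item json.RawMessage) (time.Time, errors.Error) {
            pr := &struct {
                CreatedAt time.Time `json:"created_at"`
            }{}
            if e := json.Unmarshal(item, pr); e != nil {
                return time.Time{}, errors.Default.Wrap(e, "failed to parse created_at")
            }
            return pr.CreatedAt, nil
        },
    },
    CollectUnfinishedDetails: FinalizableApiCollectorDetailArgs{
        FinalizableApiCollectorCommonArgs: FinalizableApiCollectorCommonArgs{
            UrlTemplate:    "repos/{{ .Params.Name }}/pulls/{{ .Input.Number }}", // hypothetical
            ResponseParser: GetRawMessageDirectFromResponse,
        },
        BuildInputIterator: func() (Iterator, errors.Error) {
            // iterate over unfinalized records stored by a previous extraction
            cursor, err := db.Cursor(
                dal.Select("number"),
                dal.From(&models.MyPluginPullRequest{}),
                dal.Where("state = ?", "open"),
            )
            if err != nil {
                return nil, err
            }
            return NewDalCursorIterator(db, cursor, reflect.TypeOf(MyInputRow{}))
        },
    },
})
if err != nil {
    return err
}
return collector.Execute()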
func RefineIssueRepoCommit ¶
func RefineIssueRepoCommit(item *crossdomain.IssueRepoCommit, repoPatterns []*regexp.Regexp, commitUrl string) *crossdomain.IssueRepoCommit
RefineIssueRepoCommit refines issue repo commit
func RemoveStartingSlashFromPath ¶
RemoveStartingSlashFromPath removes the leading slash from relativePath, if present
func UnmarshalResponse ¶
UnmarshalResponse reads the response body and unmarshals the JSON into v
Types ¶
type AccessToken ¶
type AccessToken struct {
Token string `mapstructure:"token" validate:"required" json:"token" gorm:"serializer:encdec"`
}
AccessToken implements HTTP Bearer Authentication with Access Token
func (*AccessToken) GetAccessTokenAuthenticator ¶
func (at *AccessToken) GetAccessTokenAuthenticator() plugin.ApiAuthenticator
GetAccessTokenAuthenticator returns SetupAuthentication
func (*AccessToken) SetupAuthentication ¶
func (at *AccessToken) SetupAuthentication(request *http.Request) errors.Error
SetupAuthentication sets up the request headers for authentication
type ApiAsyncClient ¶
type ApiAsyncClient struct {
    *ApiClient
    *WorkerScheduler
    // contains filtered or unexported fields
}
ApiAsyncClient is built on top of ApiClient to provide asynchronous semantics. You may submit multiple requests at once by calling `DoGetAsync`, and those requests will be performed in parallel with rate-limit support
func CreateAsyncApiClient ¶
func CreateAsyncApiClient( taskCtx plugin.TaskContext, apiClient *ApiClient, rateLimiter *ApiRateLimitCalculator, ) (*ApiAsyncClient, errors.Error)
CreateAsyncApiClient creates a new ApiAsyncClient
func (*ApiAsyncClient) DoAsync ¶
func (apiClient *ApiAsyncClient) DoAsync( method string, path string, query url.Values, body interface{}, header http.Header, handler common.ApiAsyncCallback, retry int, )
DoAsync would carry out an asynchronous request
func (*ApiAsyncClient) DoGetAsync ¶
func (apiClient *ApiAsyncClient) DoGetAsync( path string, query url.Values, header http.Header, handler common.ApiAsyncCallback, )
DoGetAsync enqueues an API GET request; the request may be sent sometime in the future, in parallel with other API requests
func (*ApiAsyncClient) DoPostAsync ¶
func (apiClient *ApiAsyncClient) DoPostAsync( path string, query url.Values, body interface{}, header http.Header, handler common.ApiAsyncCallback, )
DoPostAsync enqueues an API POST request; the request may be sent sometime in the future, in parallel with other API requests
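A minimal sketch of enqueueing a request, assuming `common.ApiAsyncCallback` is a function of the form `func(res *http.Response) errors.Error`; the path is illustrative:

apiClient.DoGetAsync("repos/org/repo/issues", nil, nil, func(res *http.Response) errors.Error {
    msgs, err := GetRawMessageArrayFromResponse(res)
    if err != nil {
        return err
    }
    // process the collected messages here
    _ = msgs
    return nil
})
// block until every enqueued request and its handler has finished
if err := apiClient.WaitAsync(); err != nil {
    return err
}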
func (*ApiAsyncClient) GetMaxRetry ¶
func (apiClient *ApiAsyncClient) GetMaxRetry() int
GetMaxRetry returns the maximum retry attempts for a request
func (*ApiAsyncClient) GetNumOfWorkers ¶
func (apiClient *ApiAsyncClient) GetNumOfWorkers() int
GetNumOfWorkers returns the number of workers of the scheduler
func (*ApiAsyncClient) SetMaxRetry ¶
func (apiClient *ApiAsyncClient) SetMaxRetry( maxRetry int, )
SetMaxRetry sets the maximum retry attempts for a request
type ApiClient ¶
type ApiClient struct {
// contains filtered or unexported fields
}
ApiClient is designed for simple api requests
func NewApiClient ¶
func NewApiClient( ctx gocontext.Context, endpoint string, headers map[string]string, timeout time.Duration, proxy string, br context.BasicRes, ) (*ApiClient, errors.Error)
NewApiClient creates a new synchronous ApiClient
func NewApiClientFromConnection ¶
func NewApiClientFromConnection( ctx gocontext.Context, br context.BasicRes, connection aha.ApiConnection, ) (*ApiClient, errors.Error)
NewApiClientFromConnection creates ApiClient based on given connection.
func (*ApiClient) Do ¶
func (apiClient *ApiClient) Do( method string, path string, query url.Values, body interface{}, headers http.Header, ) (*http.Response, errors.Error)
Do sends an HTTP request with the given method, path, query, body and headers
func (*ApiClient) Get ¶
func (apiClient *ApiClient) Get( path string, query url.Values, headers http.Header, ) (*http.Response, errors.Error)
Get sends an HTTP GET request
func (*ApiClient) GetAfterFunction ¶
func (apiClient *ApiClient) GetAfterFunction() common.ApiClientAfterResponse
GetAfterFunction return afterResponseFunction
func (*ApiClient) GetBeforeFunction ¶
func (apiClient *ApiClient) GetBeforeFunction() common.ApiClientBeforeRequest
GetBeforeFunction returns the before-request callback
func (*ApiClient) GetEndpoint ¶
GetEndpoint returns the API endpoint
func (*ApiClient) GetHeaders ¶
GetHeaders returns the default request headers
func (*ApiClient) GetTimeout ¶
GetTimeout returns the request timeout
func (*ApiClient) Post ¶
func (apiClient *ApiClient) Post( path string, query url.Values, body interface{}, headers http.Header, ) (*http.Response, errors.Error)
Post sends an HTTP POST request
func (*ApiClient) SetAfterFunction ¶
func (apiClient *ApiClient) SetAfterFunction(callback common.ApiClientAfterResponse)
SetAfterFunction sets the afterResponseFunction. Don't call this function directly in a collector; use Collector.AfterResponse instead.
func (*ApiClient) SetBeforeFunction ¶
func (apiClient *ApiClient) SetBeforeFunction(callback common.ApiClientBeforeRequest)
SetBeforeFunction sets the before-request callback
func (*ApiClient) SetContext ¶
SetContext sets the context for the client
func (*ApiClient) SetEndpoint ¶
SetEndpoint updates the API endpoint
func (*ApiClient) SetHeaders ¶
SetHeaders replaces the default request headers
func (*ApiClient) SetTimeout ¶
SetTimeout updates the request timeout
type ApiCollector ¶
type ApiCollector struct {
    *RawDataSubTask
    // contains filtered or unexported fields
}
ApiCollector collects data from a remote API and stores the raw responses into the raw layer
func NewApiCollector ¶
func NewApiCollector(args ApiCollectorArgs) (*ApiCollector, errors.Error)
NewApiCollector allocates a new ApiCollector with the given args. ApiCollector helps us collect data from an API with ease: pass in an AsyncApiClient, tell it which part of the response we want to save, and ApiCollector will collect the records from the remote server and store them into the database.
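A hedged configuration sketch; the table name, endpoint, params struct, and task/client variables are illustrative, and the Pager is assumed to expose Page and Size fields:

collector, err := NewApiCollector(ApiCollectorArgs{
    RawDataSubTaskArgs: RawDataSubTaskArgs{
        Ctx:    taskCtx,
        Table:  "_raw_myplugin_issues", // hypothetical raw table
        Params: MyApiParams{ConnectionId: connectionId, Name: repoName},
    },
    ApiClient:   apiClient,
    PageSize:    100,
    UrlTemplate: "repos/{{ .Params.Name }}/issues", // hypothetical endpoint
    Query: func(reqData *RequestData) (url.Values, errors.Error) {
        q := url.Values{}
        q.Set("page", fmt.Sprintf("%v", reqData.Pager.Page))
        q.Set("per_page", fmt.Sprintf("%v", reqData.Pager.Size))
        return q, nil
    },
    ResponseParser: GetRawMessageArrayFromResponse,
})
if err != nil {
    return err
}
return collector.Execute()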
func (*ApiCollector) Execute ¶
func (collector *ApiCollector) Execute() errors.Error
Execute will start collection
func (*ApiCollector) GetAfterResponse ¶
func (collector *ApiCollector) GetAfterResponse() common.ApiClientAfterResponse
GetAfterResponse return apiClient's afterResponseFunction
func (*ApiCollector) SetAfterResponse ¶
func (collector *ApiCollector) SetAfterResponse(f common.ApiClientAfterResponse)
SetAfterResponse set apiClient's afterResponseFunction
type ApiCollectorArgs ¶
type ApiCollectorArgs struct {
    RawDataSubTaskArgs
    // UrlTemplate is used to generate the final URL for Api Collector to request
    // i.e. `api/3/issue/{{ .Input.IssueId }}/changelog`
    // For detail of what variables can be used, please check `RequestData`
    UrlTemplate string `comment:"GoTemplate for API url"`
    // Query would be sent out as part of the request URL
    Query func(reqData *RequestData) (url.Values, errors.Error)
    // Header would be sent out along with request
    Header func(reqData *RequestData) (http.Header, errors.Error)
    // GetTotalPages is to tell `ApiCollector` the total number of pages based on the response of the first page,
    // so `ApiCollector` could collect those pages in parallel for us
    GetTotalPages func(res *http.Response, args *ApiCollectorArgs) (int, errors.Error)
    // PageSize tells ApiCollector the page size
    PageSize int
    // GetNextPageCustomData indicates that this collection requests each page in order and builds the query from the previous request
    GetNextPageCustomData func(prevReqData *RequestData, prevPageResponse *http.Response) (interface{}, errors.Error)
    // Incremental indicates if this is an incremental collection; the existing data won't get deleted if it is true
    Incremental bool `comment:"indicate if this collection is incremental update"`
    // ApiClient is an asynchronous api request client with qps
    ApiClient       RateLimitedApiClient
    MinTickInterval *time.Duration
    // Input helps us collect data based on previously collected data, like collecting changelogs based on jira issue ids
    Input Iterator
    // Concurrency specifies qps for an api that doesn't return the total number of pages/records
    // NORMALLY, DO NOT SPECIFY THIS PARAMETER, unless you know what it means
    Concurrency    int
    ResponseParser func(res *http.Response) ([]json.RawMessage, errors.Error)
    AfterResponse  common.ApiClientAfterResponse
    RequestBody    func(reqData *RequestData) map[string]interface{}
    Method         string
}
ApiCollectorArgs are the arguments for creating an ApiCollector
type ApiCollectorStateManager ¶
type ApiCollectorStateManager struct {
    RawDataSubTaskArgs
    LatestState  models.CollectorLatestState
    TimeAfter    *time.Time
    ExecuteStart time.Time
    // contains filtered or unexported fields
}
ApiCollectorStateManager saves the collector state in the framework table
func NewStatefulApiCollector ¶
func NewStatefulApiCollector(args RawDataSubTaskArgs, timeAfter *time.Time) (*ApiCollectorStateManager, errors.Error)
NewStatefulApiCollector creates a new ApiCollectorStateManager
func (*ApiCollectorStateManager) Execute ¶
func (m *ApiCollectorStateManager) Execute() errors.Error
Execute the embedded collector and record execute state
func (*ApiCollectorStateManager) InitCollector ¶
func (m *ApiCollectorStateManager) InitCollector(args ApiCollectorArgs) errors.Error
InitCollector initializes the embedded API collector
func (*ApiCollectorStateManager) InitGraphQLCollector ¶
func (m *ApiCollectorStateManager) InitGraphQLCollector(args GraphqlCollectorArgs) errors.Error
InitGraphQLCollector initializes the embedded GraphQL collector
func (*ApiCollectorStateManager) IsIncremental ¶
func (m *ApiCollectorStateManager) IsIncremental() bool
IsIncremental indicates if the collector should operate in incremental mode
type ApiExtractor ¶
type ApiExtractor struct {
    *RawDataSubTask
    // contains filtered or unexported fields
}
ApiExtractor helps you extract raw data from API responses into tool-layer data. It reads rows from the specified raw data table and feeds them into the `Extract` handler; you can return arbitrary tool-layer entities from this handler. ApiExtractor first deletes old data by their RawDataOrigin information, then performs a batch save for you.
func NewApiExtractor ¶
func NewApiExtractor(args ApiExtractorArgs) (*ApiExtractor, errors.Error)
NewApiExtractor creates a new ApiExtractor
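A hedged sketch of extracting tool-layer entities from previously collected raw rows; the response struct, tool-layer model, and params are illustrative, and errors.Convert is assumed to wrap a standard error:

extractor, err := NewApiExtractor(ApiExtractorArgs{
    RawDataSubTaskArgs: RawDataSubTaskArgs{
        Ctx:    taskCtx,
        Table:  "_raw_myplugin_issues", // must match the collector's raw table
        Params: MyApiParams{ConnectionId: connectionId, Name: repoName},
    },
    Extract: func(row *RawData) ([]interface{}, errors.Error) {
        apiIssue := &apiIssueResponse{} // hypothetical response struct
        if e := json.Unmarshal(row.Data, apiIssue); e != nil {
            return nil, errors.Convert(e)
        }
        // return tool-layer entities; they will be batch-saved for you
        return []interface{}{
            &models.MyPluginIssue{ConnectionId: connectionId, Number: apiIssue.Number},
        }, nil
    },
})
if err != nil {
    return err
}
return extractor.Execute()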
func (*ApiExtractor) Execute ¶
func (extractor *ApiExtractor) Execute() errors.Error
Execute sub-task
type ApiExtractorArgs ¶
type ApiExtractorArgs struct { RawDataSubTaskArgs Params interface{} Extract func(row *RawData) ([]interface{}, errors.Error) BatchSize int }
ApiExtractorArgs are the arguments for creating an ApiExtractor
type ApiRateLimitCalculator ¶
type ApiRateLimitCalculator struct { UserRateLimitPerHour int GlobalRateLimitPerHour int MaxRetry int Method string ApiPath string DynamicRateLimit func(res *http.Response) (int, time.Duration, errors.Error) }
ApiRateLimitCalculator is a helper to calculate the API rate limit dynamically, assuming the API returns remaining/reset-time information
type AppKey ¶
type AppKey struct { AppId string `mapstructure:"appId" validate:"required" json:"appId"` SecretKey string `mapstructure:"secretKey" validate:"required" json:"secretKey" gorm:"serializer:encdec"` }
AppKey implements the API Key and Secret authentication mechanism
func (*AppKey) GetAppKeyAuthenticator ¶
func (ak *AppKey) GetAppKeyAuthenticator() plugin.ApiAuthenticator
GetAppKeyAuthenticator returns SetupAuthentication
type AsyncResponseHandler ¶
AsyncResponseHandler is a callback for handling a response asynchronously
type BaseConnection ¶
type BaseConnection struct { Name string `gorm:"type:varchar(100);uniqueIndex" json:"name" validate:"required"` common.Model }
BaseConnection defines basic properties that every connection should have
type BaseRemoteGroupResponse ¶
func (BaseRemoteGroupResponse) GroupId ¶
func (g BaseRemoteGroupResponse) GroupId() string
func (BaseRemoteGroupResponse) GroupName ¶
func (g BaseRemoteGroupResponse) GroupName() string
type BasicAuth ¶
type BasicAuth struct { Username string `mapstructure:"username" validate:"required" json:"username"` Password string `mapstructure:"password" validate:"required" json:"password" gorm:"serializer:encdec"` }
BasicAuth implements HTTP Basic Authentication
func (*BasicAuth) GetBasicAuthenticator ¶
func (ba *BasicAuth) GetBasicAuthenticator() plugin.ApiAuthenticator
GetBasicAuthenticator returns the ApiAuthenticator for setting up the HTTP request. It looks odd to return itself with a different type, but this is necessary because callers might call the method from the outer struct (`connection.SetupAuthentication(...)`), which would otherwise lead to a stack-overflow error.
func (*BasicAuth) GetEncodedToken ¶
GetEncodedToken returns the Base64-encoded token for HTTP Basic Authentication
type BatchSave ¶
type BatchSave struct {
// contains filtered or unexported fields
}
BatchSave persists multiple records of a specific type in one SQL query to improve performance
func NewBatchSave ¶
func NewBatchSave(basicRes context.BasicRes, slotType reflect.Type, size int, tableName ...string) (*BatchSave, errors.Error)
NewBatchSave creates a new BatchSave instance
func (*BatchSave) Add ¶
Add adds a record to the cache. BatchSave flushes the cached records into the database when the cache is maxed out.
type BatchSaveDivider ¶
type BatchSaveDivider struct {
// contains filtered or unexported fields
}
BatchSaveDivider creates and caches BatchSave, this is helpful when dealing with massive amount of data records with arbitrary types.
func NewBatchSaveDivider ¶
func NewBatchSaveDivider(basicRes context.BasicRes, batchSize int, table string, params string) *BatchSaveDivider
NewBatchSaveDivider creates a new BatchSaveDivider instance
func (*BatchSaveDivider) Close ¶
func (d *BatchSaveDivider) Close() errors.Error
Close closes all batches so the remaining records get saved into the database
type ConnectionApiHelper ¶
type ConnectionApiHelper struct {
// contains filtered or unexported fields
}
ConnectionApiHelper is used to implement the CRUD operations of connections
func NewConnectionHelper ¶
func NewConnectionHelper( basicRes context.BasicRes, vld *validator.Validate, pluginName string, ) *ConnectionApiHelper
NewConnectionHelper creates a ConnectionHelper for connection management
func (*ConnectionApiHelper) Create ¶
func (c *ConnectionApiHelper) Create(connection interface{}, input *plugin.ApiResourceInput) errors.Error
Create a connection record based on request body
func (*ConnectionApiHelper) Delete ¶
func (c *ConnectionApiHelper) Delete(connection interface{}) (*services.BlueprintProjectPairs, errors.Error)
Delete connection
func (*ConnectionApiHelper) First ¶
func (c *ConnectionApiHelper) First(connection interface{}, params map[string]string) errors.Error
First finds connection from db by parsing request input and decrypt it
func (*ConnectionApiHelper) FirstById ¶
func (c *ConnectionApiHelper) FirstById(connection interface{}, id uint64) errors.Error
FirstById finds connection from db by id and decrypt it
func (*ConnectionApiHelper) List ¶
func (c *ConnectionApiHelper) List(connections interface{}) errors.Error
List returns all connections with password/token decrypted
func (*ConnectionApiHelper) Patch ¶
func (c *ConnectionApiHelper) Patch(connection interface{}, input *plugin.ApiResourceInput) errors.Error
Patch (Modify) a connection record based on request body
type CursorPager ¶
CursorPager contains pagination information for a graphql request
type DalCursorIterator ¶
type DalCursorIterator struct {
// contains filtered or unexported fields
}
DalCursorIterator iterates over a dal.Rows cursor, decoding each row into the given element type
func NewBatchedDalCursorIterator ¶
func NewBatchedDalCursorIterator(db dal.Dal, cursor dal.Rows, elemType reflect.Type, batchSize int) (*DalCursorIterator, errors.Error)
NewBatchedDalCursorIterator creates a DalCursorIterator that fetches rows in batches of batchSize
func NewDalCursorIterator ¶
func NewDalCursorIterator(db dal.Dal, cursor dal.Rows, elemType reflect.Type) (*DalCursorIterator, errors.Error)
NewDalCursorIterator creates a DalCursorIterator that fetches one row at a time
func (*DalCursorIterator) Fetch ¶
func (c *DalCursorIterator) Fetch() (interface{}, errors.Error)
Fetch reads a single row if batching is disabled; otherwise it reads up to batch-size rows, and the runtime return type will be []interface{}. Note: HasNext needs to have been called before invoking this.
func (*DalCursorIterator) HasNext ¶
func (c *DalCursorIterator) HasNext() bool
HasNext advances the row cursor. If we're at the end, it returns false.
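A sketch of the expected call order; the iterator construction and row type are illustrative:

// given: iter, created via NewDalCursorIterator(db, cursor, reflect.TypeOf(models.MyRow{}))
for iter.HasNext() {
    row, err := iter.Fetch()
    if err != nil {
        return err
    }
    myRow := row.(*models.MyRow) // in non-batched mode each Fetch yields a single row
    _ = myRow
}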
type DataConvertHandler ¶
DataConvertHandler accepts a row from the source cursor and returns the list of entities that need to be stored
type DataConverter ¶
type DataConverter struct {
    *RawDataSubTask
    // contains filtered or unexported fields
}
DataConverter helps you convert data from tool-layer tables to domain-layer tables. It reads rows from the specified Iterator and feeds them into the `Convert` handler; you can return arbitrary domain-layer entities from this handler. DataConverter first deletes old data by their RawDataOrigin information, then performs a batch save operation for you.
func NewDataConverter ¶
func NewDataConverter(args DataConverterArgs) (*DataConverter, errors.Error)
NewDataConverter function helps you create a DataConverter using DataConverterArgs. You can see the usage in plugins/github/tasks/pr_issue_convertor.go or other convertor file.
func (*DataConverter) Execute ¶
func (converter *DataConverter) Execute() errors.Error
Execute implements the SubTask interface. It loads data from tool-layer tables using `Ctx.GetDal()`, converts the data using the `converter.args.Convert` handler, then saves the results to domain-layer tables using BatchSaveDivider.
type DataConverterArgs ¶
type DataConverterArgs struct {
    RawDataSubTaskArgs
    // Domain layer entity Id prefix, i.e. `jira:JiraIssue:1`, `github:GithubIssue`
    InputRowType reflect.Type
    Input        dal.Rows
    Convert      DataConvertHandler
    BatchSize    int
}
DataConverterArgs includes the arguments for creating a DataConverter:
- InputRowType: type of the input row
- Input: dal cursor
- RawDataSubTaskArgs: args about the raw data task
- Convert: main function containing the conversion logic
- BatchSize: batch size
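A hedged sketch of converting tool-layer rows into domain-layer entities; the models, dal clauses, and the DataConvertHandler signature (`func(inputRow interface{}) ([]interface{}, errors.Error)`) are assumptions:

cursor, err := db.Cursor(
    dal.From(&models.MyPluginIssue{}),
    dal.Where("connection_id = ?", connectionId),
)
if err != nil {
    return err
}
defer cursor.Close()

converter, err := NewDataConverter(DataConverterArgs{
    RawDataSubTaskArgs: RawDataSubTaskArgs{
        Ctx:    taskCtx,
        Table:  "_raw_myplugin_issues",
        Params: MyApiParams{ConnectionId: connectionId, Name: repoName},
    },
    InputRowType: reflect.TypeOf(models.MyPluginIssue{}),
    Input:        cursor,
    Convert: func(inputRow interface{}) ([]interface{}, errors.Error) {
        toolIssue := inputRow.(*models.MyPluginIssue)
        domainIssue := &ticket.Issue{Title: toolIssue.Title} // hypothetical field mapping
        return []interface{}{domainIssue}, nil
    },
})
if err != nil {
    return err
}
return converter.Execute()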
type DataEnrichHandler ¶
DataEnrichHandler accepts a row from the Input and produces arbitrary records. You are free to modify the given `row` in place and include it in the returned result for it to be saved.
type DataEnricher ¶
type DataEnricher[InputRowType any] struct {
    // contains filtered or unexported fields
}
DataEnricher helps you enrich data, with cancellation and BatchSave support
func NewDataEnricher ¶
func NewDataEnricher[InputRowType any](args DataEnricherArgs[InputRowType]) (*DataEnricher[InputRowType], errors.Error)
NewDataEnricher creates a new DataEnricher
func (*DataEnricher[InputRowType]) Execute ¶
func (enricher *DataEnricher[InputRowType]) Execute() errors.Error
type DataEnricherArgs ¶
type DataEnricherArgs[InputRowType any] struct {
    Ctx       plugin.SubTaskContext
    Name      string // Enricher name, which will be put into _raw_data_remark
    Input     dal.Rows
    Enrich    DataEnrichHandler[InputRowType]
    BatchSize int
}
DataEnricherArgs includes the arguments needed for data enrichment
type DateIterator ¶
DateIterator iterates over a range of dates, day by day
func NewDateIterator ¶
func NewDateIterator(days int) (*DateIterator, errors.Error)
NewDateIterator FIXME ...
func (*DateIterator) Fetch ¶
func (c *DateIterator) Fetch() (interface{}, errors.Error)
Fetch FIXME ...
type DateTimeFormatItem ¶
DateTimeFormatItem FIXME ... TODO: move this to helper
type FinalizableApiCollectorArgs ¶
type FinalizableApiCollectorArgs struct {
    RawDataSubTaskArgs
    ApiClient                RateLimitedApiClient
    TimeAfter                *time.Time // leave it be nil to disable time filter
    CollectNewRecordsByList  FinalizableApiCollectorListArgs
    CollectUnfinishedDetails FinalizableApiCollectorDetailArgs
}
type FinalizableApiCollectorCommonArgs ¶
type FinalizableApiCollectorCommonArgs struct {
    UrlTemplate string // required, url path template for the request, e.g. repos/{{ .Params.Name }}/pulls or incident/{{ .Input.Number }} (if using iterators)
    Method string // optional, request method, e.g. GET(default), POST, PUT, DELETE
    Query func(reqData *RequestData, createdAfter *time.Time) (url.Values, errors.Error) // optional, build query params for the request
    Header func(reqData *RequestData, createdAfter *time.Time) (http.Header, errors.Error) // optional, build header for the request
    RequestBody func(reqData *RequestData) map[string]interface{} // optional, build request body for the request if the Method is set to POST or PUT
    MinTickInterval *time.Duration // optional, minimum interval between two requests; some endpoints might have a more conservative rate limit than others within the same instance, you can mitigate this by setting a higher MinTickInterval to override the connection-level rate limit
    AfterResponse common.ApiClientAfterResponse // optional, hook to run after each response, would be called before the ResponseParser
    ResponseParser func(res *http.Response) ([]json.RawMessage, errors.Error) // required, parse the response body and return a list of entities
}
FinalizableApiCollectorCommonArgs is the common arguments for both list and detail collectors Note that all request-related arguments would be called or utilized before any response-related arguments
type FinalizableApiCollectorDetailArgs ¶
type FinalizableApiCollectorDetailArgs struct {
    FinalizableApiCollectorCommonArgs
    // required, create an iterator that iterates through all unfinalized records in the database. These records
    // will be fed as the "Input" (or {{ .Input.* }} in UrlTemplate) argument back into
    // FinalizableApiCollectorCommonArgs, which makes the API calls to re-collect their newest states.
    BuildInputIterator func() (Iterator, errors.Error)
}
FinalizableApiCollectorDetailArgs is the arguments for the detail collector
type FinalizableApiCollectorListArgs ¶
type FinalizableApiCollectorListArgs struct {
    FinalizableApiCollectorCommonArgs
    GetCreated func(item json.RawMessage) (time.Time, errors.Error) // optional, to extract create date from a raw json of a single record, leave it be `nil` if API supports filtering by updated date (Don't forget to set the Query)
    PageSize int // required, number of records per page
    Concurrency int // required for Undetermined Strategy, number of concurrent requests
    GetNextPageCustomData func(prevReqData *RequestData, prevPageResponse *http.Response) (interface{}, errors.Error) // required for Sequential Strategy, to extract the next page cursor from the given response
    GetTotalPages func(res *http.Response, args *ApiCollectorArgs) (int, errors.Error) // required for Determined Strategy, to extract the total number of pages from the given response
}
FinalizableApiCollectorListArgs is the arguments for the list collector
type FirstPageTokenOutput ¶
type FirstPageTokenOutput struct {
PageToken string `json:"pageToken"`
}
type GenericScopeApiHelper ¶
type GenericScopeApiHelper[Conn any, Scope plugin.ToolLayerScope, ScopeConfig any] struct {
    // contains filtered or unexported fields
}
func NewGenericScopeHelper ¶
func NewGenericScopeHelper[Conn any, Scope plugin.ToolLayerScope, ScopeConfig any]( basicRes context.BasicRes, vld *validator.Validate, connHelper *ConnectionApiHelper, dbHelper ScopeDatabaseHelper[Conn, Scope, ScopeConfig], params *ReflectionParameters, opts *ScopeHelperOptions, ) *GenericScopeApiHelper[Conn, Scope, ScopeConfig]
func (*GenericScopeApiHelper[Conn, Scope, ScopeConfig]) DbHelper ¶
func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) DbHelper() ScopeDatabaseHelper[Conn, Scope, ScopeConfig]
func (*GenericScopeApiHelper[Conn, Scope, ScopeConfig]) DeleteScope ¶
func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) DeleteScope(input *plugin.ApiResourceInput) (*serviceHelper.BlueprintProjectPairs, errors.Error)
func (*GenericScopeApiHelper[Conn, Scope, ScopeConfig]) GetScope ¶
func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) GetScope(input *plugin.ApiResourceInput) (*ScopeRes[Scope, ScopeConfig], errors.Error)
func (*GenericScopeApiHelper[Conn, Scope, ScopeConfig]) GetScopes ¶
func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) GetScopes(input *plugin.ApiResourceInput) ([]*ScopeRes[Scope, ScopeConfig], errors.Error)
func (*GenericScopeApiHelper[Conn, Scope, ScopeConfig]) PutScopes ¶
func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) PutScopes(input *plugin.ApiResourceInput, scopes []*Scope) ([]*ScopeRes[Scope, ScopeConfig], errors.Error)
func (*GenericScopeApiHelper[Conn, Scope, ScopeConfig]) UpdateScope ¶
func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) UpdateScope(input *plugin.ApiResourceInput) (*ScopeRes[Scope, ScopeConfig], errors.Error)
type GraphqlAsyncClient ¶
type GraphqlAsyncClient struct {
// contains filtered or unexported fields
}
GraphqlAsyncClient sends GraphQL requests one by one
func CreateAsyncGraphqlClient ¶
func CreateAsyncGraphqlClient( taskCtx plugin.TaskContext, graphqlClient *graphql.Client, logger log.Logger, getRateRemaining func(context.Context, *graphql.Client, log.Logger) (rateRemaining int, resetAt *time.Time, err errors.Error), ) (*GraphqlAsyncClient, errors.Error)
CreateAsyncGraphqlClient creates a new GraphqlAsyncClient
func (*GraphqlAsyncClient) GetMaxRetry ¶
func (apiClient *GraphqlAsyncClient) GetMaxRetry() (int, time.Duration)
GetMaxRetry returns the maximum retry attempts for a request
func (*GraphqlAsyncClient) NextTick ¶
func (apiClient *GraphqlAsyncClient) NextTick(task func() errors.Error, taskErrorChecker func(err error))
NextTick schedules the task to run on the scheduler's next tick
func (*GraphqlAsyncClient) Query ¶
func (apiClient *GraphqlAsyncClient) Query(q interface{}, variables map[string]interface{}) ([]graphql.DataError, error)
Query sends a GraphQL request once the lock is acquired. The returned []graphql.DataError holds the errors carried in the response body; the second return value covers any other error.
func (*GraphqlAsyncClient) Release ¶
func (apiClient *GraphqlAsyncClient) Release()
Release releases the underlying scheduler of the client
func (*GraphqlAsyncClient) SetGetRateCost ¶
func (apiClient *GraphqlAsyncClient) SetGetRateCost(getRateCost func(q interface{}) int)
SetGetRateCost sets the function used to calculate how much rate a query costs; if not set, every query costs 1
func (*GraphqlAsyncClient) SetMaxRetry ¶
func (apiClient *GraphqlAsyncClient) SetMaxRetry( maxRetry int, waitBeforeRetry time.Duration, )
SetMaxRetry sets the maximum retry attempts for a request
func (*GraphqlAsyncClient) Wait ¶
func (apiClient *GraphqlAsyncClient) Wait()
Wait blocks until all async requests were done
type GraphqlAsyncResponseHandler ¶
GraphqlAsyncResponseHandler callback function to handle the Response asynchronously
type GraphqlCollector ¶
type GraphqlCollector struct {
    *RawDataSubTask
    // contains filtered or unexported fields
}
GraphqlCollector helps you collect data from GraphQL services
func NewGraphqlCollector ¶
func NewGraphqlCollector(args GraphqlCollectorArgs) (*GraphqlCollector, errors.Error)
NewGraphqlCollector allocates a new GraphqlCollector with the given args. GraphqlCollector helps us collect data from an API with ease: pass in an AsyncGraphqlClient, tell it which part of the response we want to save, and GraphqlCollector will collect the records from the remote server and store them into the database.
func (*GraphqlCollector) BatchSaveWithOrigin ¶
func (collector *GraphqlCollector) BatchSaveWithOrigin(divider *BatchSaveDivider, results []interface{}, row *RawData) errors.Error
BatchSaveWithOrigin save the results and fill raw data origin for them
func (*GraphqlCollector) Execute ¶
func (collector *GraphqlCollector) Execute() errors.Error
Execute api collection
func (*GraphqlCollector) ExtractExistRawData ¶
func (collector *GraphqlCollector) ExtractExistRawData(divider *BatchSaveDivider) errors.Error
ExtractExistRawData extracts data from existing raw-layer records when running an incremental collection
func (*GraphqlCollector) HasError ¶
func (collector *GraphqlCollector) HasError() bool
HasError returns whether any error occurred
type GraphqlCollectorArgs ¶
type GraphqlCollectorArgs struct {
    RawDataSubTaskArgs
    // BuildQuery would be sent out as part of the request URL
    BuildQuery func(reqData *GraphqlRequestData) (query interface{}, variables map[string]interface{}, err error)
    // PageSize tells ApiCollector the page size
    PageSize int
    // GraphqlClient is an asynchronous api request client with qps
    GraphqlClient *GraphqlAsyncClient
    // Input helps us collect data based on previously collected data, like collecting changelogs based on jira issue ids
    Input Iterator
    // how many times fetched from input, default 1 means only fetch once
    // NOTICE: InputStep=1 will fill value as item and InputStep>1 will fill value as []item
    InputStep int
    // Incremental indicates if this is an incremental collection; the existing data won't get deleted if it is true
    Incremental bool `comment:"indicate if this collection is incremental update"`
    // GetPageInfo is to tell `GraphqlCollector` the page information
    GetPageInfo func(query interface{}, args *GraphqlCollectorArgs) (*GraphqlQueryPageInfo, error)
    BatchSize int
    // one of ResponseParser and ResponseParserWithDataErrors is required to parse response
    ResponseParser               func(query interface{}, variables map[string]interface{}) ([]interface{}, error)
    ResponseParserWithDataErrors func(query interface{}, variables map[string]interface{}, dataErrors []graphql.DataError) ([]interface{}, error)
}
GraphqlCollectorArgs arguments needed by GraphqlCollector
type GraphqlQueryPageInfo ¶
type GraphqlQueryPageInfo struct { EndCursor string `json:"endCursor"` HasNextPage bool `json:"hasNextPage"` }
GraphqlQueryPageInfo contains the pagination data
type GraphqlRequestData ¶
type GraphqlRequestData struct { Pager *CursorPager Params interface{} Input interface{} InputJSON []byte }
GraphqlRequestData is the input of `UrlTemplate` `BuildQuery` and `Header`, so we can generate them dynamically
type Iso8601Time ¶
type Iso8601Time struct {
// contains filtered or unexported fields
}
Iso8601Time is a time.Time wrapper that handles ISO-8601-style timestamps
func (Iso8601Time) MarshalJSON ¶
func (jt Iso8601Time) MarshalJSON() ([]byte, error)
MarshalJSON FIXME ...
func (*Iso8601Time) String ¶
func (jt *Iso8601Time) String() string
func (*Iso8601Time) ToNullableTime ¶
func (jt *Iso8601Time) ToNullableTime() *time.Time
ToNullableTime FIXME ...
func (*Iso8601Time) UnmarshalJSON ¶
func (jt *Iso8601Time) UnmarshalJSON(b []byte) error
UnmarshalJSON parses a JSON-encoded time string into the Iso8601Time
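A small sketch of using the type as a JSON field; the struct and field name are illustrative, errors.Convert is assumed to wrap a standard error, and Iso8601TimeToTime is assumed to return nil for a nil input:

func parseIssueTime(payload []byte) (*time.Time, errors.Error) {
    var issue struct {
        CreatedAt *Iso8601Time `json:"created_at"`
    }
    if e := json.Unmarshal(payload, &issue); e != nil {
        return nil, errors.Convert(e)
    }
    // nil-safe conversion to *time.Time
    return Iso8601TimeToTime(issue.CreatedAt), nil
}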
type ListBaseNode ¶
type ListBaseNode struct {
// contains filtered or unexported fields
}
ListBaseNode 'abstract' base struct for Nodes that are chained in a linked list manner
func NewListBaseNode ¶
func NewListBaseNode() *ListBaseNode
NewListBaseNode create and init a new node (only to be called by subclasses)
func (*ListBaseNode) SetNext ¶
func (l *ListBaseNode) SetNext(next interface{})
SetNext updates the next pointer of the node
type MultiAuth ¶
type MultiAuth struct {
    AuthMethod string `mapstructure:"authMethod" json:"authMethod" validate:"required,oneof=BasicAuth AccessToken AppKey"`
    // contains filtered or unexported fields
}
MultiAuth implements the MultiAuthenticator interface
func (*MultiAuth) GetApiAuthenticator ¶
func (ma *MultiAuth) GetApiAuthenticator(connection plugin.ApiConnection) (plugin.ApiAuthenticator, errors.Error)
func (*MultiAuth) SetupAuthenticationForConnection ¶
func (ma *MultiAuth) SetupAuthenticationForConnection(connection plugin.ApiConnection, req *http.Request) errors.Error
SetupAuthenticationForConnection sets up authentication for the specified `req` based on the connection. A specific connection should implement IAuthentication and then call this method for MultiAuth to work properly; check jira/models/connection.go:JiraConn if you need an example. Note: this method is called for each request, so it is performance-sensitive; do NOT use reflection here.
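A hedged sketch of a connection that embeds these helpers, modeled loosely on the JiraConn example referenced above; the struct layout and delegation are illustrative:

type MyConn struct {
    RestConnection `mapstructure:",squash"`
    MultiAuth      `mapstructure:",squash"`
    BasicAuth      `mapstructure:",squash"`
    AccessToken    `mapstructure:",squash"`
}

// SetupAuthentication delegates to MultiAuth, which dispatches to the method
// selected by AuthMethod (BasicAuth, AccessToken or AppKey)
func (c *MyConn) SetupAuthentication(req *http.Request) errors.Error {
    return c.MultiAuth.SetupAuthenticationForConnection(c, req)
}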
func (*MultiAuth) ValidateConnection ¶
type NoRemoteGroupResponse ¶
type NoRemoteGroupResponse struct { }
func (NoRemoteGroupResponse) GroupId ¶
func (NoRemoteGroupResponse) GroupId() string
func (NoRemoteGroupResponse) GroupName ¶
func (NoRemoteGroupResponse) GroupName() string
type NoScopeConfig ¶
type NoScopeConfig struct{}
type Queue ¶
type Queue struct {
// contains filtered or unexported fields
}
Queue represents a queue
func (*Queue) CleanWithOutLock ¶
func (q *Queue) CleanWithOutLock()
CleanWithOutLock is no lock mode of Clean
func (*Queue) GetCountWithOutLock ¶
GetCountWithOutLock is no lock mode of GetCount
func (*Queue) GetCountWithWorkingBlock ¶
GetCountWithWorkingBlock returns the node count in the queue, returning zero only when the working count is also zero
func (*Queue) Pull ¶
Pull gets a node from the queue. It increments the working count, and blocks when there is no node in the queue but the working count is not zero.
func (*Queue) PullWithOutLock ¶
PullWithOutLock is no lock mode of Pull
func (*Queue) PullWithWorkingBlock ¶
func (*Queue) PushWithoutLock ¶
PushWithoutLock is no lock mode of Push
type QueueIterator ¶
type QueueIterator struct {
// contains filtered or unexported fields
}
QueueIterator implements Iterator based on Queue
func NewQueueIterator ¶
func NewQueueIterator() *QueueIterator
NewQueueIterator creates a new QueueIterator
func (*QueueIterator) Fetch ¶
func (q *QueueIterator) Fetch() (interface{}, errors.Error)
Fetch current item
func (*QueueIterator) HasNext ¶
func (q *QueueIterator) HasNext() bool
HasNext advances the cursor. If we're at the end, it returns false.
type QueueIteratorNode ¶
type QueueIteratorNode struct {
// contains filtered or unexported fields
}
QueueIteratorNode implements the api.Iterator interface with ability to accept new item when being iterated
func NewQueueIteratorNode ¶
func NewQueueIteratorNode(data interface{}) *QueueIteratorNode
NewQueueIteratorNode creates a new QueueIteratorNode
func (*QueueIteratorNode) Data ¶
func (q *QueueIteratorNode) Data() interface{}
Data returns data of the node
func (*QueueIteratorNode) Next ¶
func (q *QueueIteratorNode) Next() interface{}
Next return the next node
func (*QueueIteratorNode) SetNext ¶
func (q *QueueIteratorNode) SetNext(next interface{})
SetNext updates the next pointer of the node
type QueueNode ¶
type QueueNode interface { Next() interface{} SetNext(next interface{}) Data() interface{} }
QueueNode represents a node in the queue
type RateLimitedApiClient ¶
type RateLimitedApiClient interface {
    DoGetAsync(path string, query url.Values, header http.Header, handler common.ApiAsyncCallback)
    DoPostAsync(path string, query url.Values, body interface{}, header http.Header, handler common.ApiAsyncCallback)
    WaitAsync() errors.Error
    HasError() bool
    NextTick(task func() errors.Error)
    GetNumOfWorkers() int
    GetAfterFunction() common.ApiClientAfterResponse
    SetAfterFunction(callback common.ApiClientAfterResponse)
    Reset(d time.Duration)
    GetTickInterval() time.Duration
    Release()
}
RateLimitedApiClient is the asynchronous, rate-limited API client interface consumed by collectors
type RawData ¶
type RawData struct { ID uint64 `gorm:"primaryKey"` Params string `gorm:"type:varchar(255);index"` Data []byte Url string Input datatypes.JSON CreatedAt time.Time }
RawData is raw data structure in DB storage
type RawDataSubTask ¶
type RawDataSubTask struct {
// contains filtered or unexported fields
}
RawDataSubTask provides the common features for raw-data sub-tasks
func NewRawDataSubTask ¶
func NewRawDataSubTask(args RawDataSubTaskArgs) (*RawDataSubTask, errors.Error)
NewRawDataSubTask constructor for RawDataSubTask
func (*RawDataSubTask) GetParams ¶
func (r *RawDataSubTask) GetParams() string
GetParams returns the raw params
func (*RawDataSubTask) GetTable ¶
func (r *RawDataSubTask) GetTable() string
GetTable returns the raw table name
type RawDataSubTaskArgs ¶
type RawDataSubTaskArgs struct {
    Ctx plugin.SubTaskContext
    // Table store raw data
    Table string `comment:"Raw data table name"`
    // Deprecated: Use Options instead
    // This struct will be JSONEncoded and stored into database along with raw data itself, to identify the minimal set of
    // data to be processed, for example, we process JiraIssues by Board
    Params  any         `comment:"To identify a set of records with same UrlTemplate, i.e. {ConnectionId, BoardId} for jira entities"`
    Options TaskOptions `comment:"To identify a set of records with same UrlTemplate, i.e. {ConnectionId, BoardId} for jira entities"`
}
RawDataSubTaskArgs are the common arguments shared by raw-data sub-tasks
type ReflectionParameters ¶
type ReflectionParameters struct {
    // This corresponds to the struct field of the scope struct's ID field
    ScopeIdFieldName string `validate:"required"`
    // This corresponds to the database column name of the scope struct's ID (typically primary key) field
    ScopeIdColumnName string `validate:"required"`
    // This corresponds to the scope field on the ApiParams struct of a plugin.
    RawScopeParamName string `validate:"required"`
}
type RegexEnricher ¶
type RegexEnricher struct {
// contains filtered or unexported fields
}
RegexEnricher processes values with regex patterns. TODO: remove Enricher from the naming since it is more like a util function
func NewRegexEnricher ¶
func NewRegexEnricher() *RegexEnricher
NewRegexEnricher initializes a RegexEnricher
func (*RegexEnricher) AddRegexp ¶
func (r *RegexEnricher) AddRegexp(patterns ...string) errors.Error
AddRegexp compiles the given patterns and adds them to the regexp map. TODO: to be removed
func (*RegexEnricher) GetEnrichResult ¶
func (r *RegexEnricher) GetEnrichResult(pattern string, v string, result string) string
GetEnrichResult looks up the compiled regular expression by pattern, checks whether v matches it, and returns the corresponding value (result or empty). TODO: to be removed
func (*RegexEnricher) ReturnNameIfMatched ¶
func (r *RegexEnricher) ReturnNameIfMatched(name string, targets ...string) string
ReturnNameIfMatched will return name if any of the targets matches the regex associated with the given name
func (*RegexEnricher) ReturnNameIfOmittedOrMatched ¶
func (r *RegexEnricher) ReturnNameIfOmittedOrMatched(name string, targets ...string) string
ReturnNameIfOmittedOrMatched returns the given name if no regex was registered under that name; otherwise it falls back to ReturnNameIfMatched
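A small usage sketch; the name, pattern, and target strings are illustrative:

enricher := NewRegexEnricher()
if err := enricher.TryAdd("DEPLOYMENT", `(?i)deploy`); err != nil {
    return err
}
// "DEPLOYMENT" is returned when any target matches the pattern registered under that name
envType := enricher.ReturnNameIfMatched("DEPLOYMENT", job.Name, pipeline.Name)
// also returns "DEPLOYMENT" when nothing was registered under that name
fallback := enricher.ReturnNameIfOmittedOrMatched("DEPLOYMENT", job.Name)
_ = envType
_ = fallback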
type RemoteApiHelper ¶
type RemoteApiHelper[Conn plugin.ApiConnection, Scope plugin.ToolLayerScope, ApiScope plugin.ApiScope, Group plugin.ApiGroup] struct {
    // contains filtered or unexported fields
}
RemoteApiHelper is used to implement the remote scope APIs of a connection
func NewRemoteHelper ¶
func NewRemoteHelper[Conn plugin.ApiConnection, Scope plugin.ToolLayerScope, ApiScope plugin.ApiScope, Group plugin.ApiGroup]( basicRes coreContext.BasicRes, vld *validator.Validate, connHelper *ConnectionApiHelper, ) *RemoteApiHelper[Conn, Scope, ApiScope, Group]
NewRemoteHelper creates a RemoteApiHelper for connection management
func (*RemoteApiHelper[Conn, Scope, ApiScope, Group]) GetScopesFromRemote ¶
func (r *RemoteApiHelper[Conn, Scope, ApiScope, Group]) GetScopesFromRemote(input *plugin.ApiResourceInput, getGroup func(basicRes coreContext.BasicRes, gid string, queryData *RemoteQueryData, connection Conn) ([]Group, errors.Error), getScope func(basicRes coreContext.BasicRes, gid string, queryData *RemoteQueryData, connection Conn) ([]ApiScope, errors.Error), ) (*plugin.ApiResourceOutput, errors.Error)
GetScopesFromRemote gets the scopes from api
func (*RemoteApiHelper[Conn, Scope, ApiScope, Group]) PrepareFirstPageToken ¶
func (r *RemoteApiHelper[Conn, Scope, ApiScope, Group]) PrepareFirstPageToken(customInfo string) (*plugin.ApiResourceOutput, errors.Error)
PrepareFirstPageToken prepares the first page token
func (*RemoteApiHelper[Conn, Scope, ApiScope, Group]) SearchRemoteScopes ¶
func (r *RemoteApiHelper[Conn, Scope, ApiScope, Group]) SearchRemoteScopes(input *plugin.ApiResourceInput, searchScope func(basicRes coreContext.BasicRes, queryData *RemoteQueryData, connection Conn) ([]ApiScope, errors.Error), ) (*plugin.ApiResourceOutput, errors.Error)
type RemoteQueryData ¶
type RemoteScopesChild ¶
type RemoteScopesOutput ¶
type RemoteScopesOutput struct { Children []RemoteScopesChild `json:"children"` NextPageToken string `json:"nextPageToken"` }
type RequestData ¶
type RequestData struct {
    Pager     *Pager
    Params    interface{}
    Input     interface{}
    InputJSON []byte
    // equal to the return value from GetNextPageCustomData when PageSize>0 and not the first request
    CustomData interface{}
}
RequestData is the input of `UrlTemplate` `Query` and `Header`, so we can generate them dynamically
type RestConnection ¶
type RestConnection struct { Endpoint string `mapstructure:"endpoint" validate:"required" json:"endpoint"` Proxy string `mapstructure:"proxy" json:"proxy"` RateLimitPerHour int `comment:"api request rate limit per hour" json:"rateLimitPerHour"` }
RestConnection implements the ApiConnection interface
func (RestConnection) GetEndpoint ¶
func (rc RestConnection) GetEndpoint() string
GetEndpoint returns the API endpoint of the connection, which always ends with "/"
func (RestConnection) GetProxy ¶
func (rc RestConnection) GetProxy() string
GetProxy returns the proxy for the connection
func (RestConnection) GetRateLimitPerHour ¶
func (rc RestConnection) GetRateLimitPerHour() int
GetRateLimitPerHour returns the rate limit for the connection
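For context, a plugin connection typically embeds RestConnection to satisfy this interface. The sketch below shows the shape of such a struct; MyPluginConn and its token field are hypothetical, and the import path is assumed.

package example

import "github.com/apache/incubator-devlake/helpers/pluginhelper/api"

// MyPluginConn is a hypothetical connection model; embedding RestConnection
// provides GetEndpoint, GetProxy and GetRateLimitPerHour.
type MyPluginConn struct {
    api.RestConnection `mapstructure:",squash"`
    Token              string `mapstructure:"token" json:"token" validate:"required"`
}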
type ScopeApiHelper ¶
type ScopeApiHelper[Conn any, Scope plugin.ToolLayerScope, Tr any] struct {
    *GenericScopeApiHelper[Conn, Scope, Tr]
}
ScopeApiHelper is used to write the CRUD of scopes
func NewScopeHelper ¶
func NewScopeHelper[Conn any, Scope plugin.ToolLayerScope, Tr any](
    basicRes context.BasicRes,
    vld *validator.Validate,
    connHelper *ConnectionApiHelper,
    dbHelper ScopeDatabaseHelper[Conn, Scope, Tr],
    params *ReflectionParameters,
    opts *ScopeHelperOptions,
) *ScopeApiHelper[Conn, Scope, Tr]
NewScopeHelper creates a ScopeApiHelper for scope management
func (*ScopeApiHelper[Conn, Scope, Tr]) Delete ¶
func (c *ScopeApiHelper[Conn, Scope, Tr]) Delete(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
func (*ScopeApiHelper[Conn, Scope, Tr]) GetScope ¶
func (c *ScopeApiHelper[Conn, Scope, Tr]) GetScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
func (*ScopeApiHelper[Conn, Scope, Tr]) GetScopeList ¶
func (c *ScopeApiHelper[Conn, Scope, Tr]) GetScopeList(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
func (*ScopeApiHelper[Conn, Scope, Tr]) Put ¶
func (c *ScopeApiHelper[Conn, Scope, Tr]) Put(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
Put saves the given scopes to the database. It expects a slice of struct pointers as the scopes argument. It also expects a fieldName argument, which is used to extract the connection ID from the input.Params map.
func (*ScopeApiHelper[Conn, Scope, Tr]) Update ¶
func (c *ScopeApiHelper[Conn, Scope, Tr]) Update(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
type ScopeConfigHelper ¶
ScopeConfigHelper is used to write the CRUD of scope configs
func NewScopeConfigHelper ¶
func NewScopeConfigHelper[Tr dal.Tabler](
    basicRes context.BasicRes,
    vld *validator.Validate,
    pluginName string,
) *ScopeConfigHelper[Tr]
NewScopeConfigHelper creates a ScopeConfigHelper for scope config management
func (ScopeConfigHelper[ScopeConfig]) Create ¶
func (t ScopeConfigHelper[ScopeConfig]) Create(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
func (ScopeConfigHelper[ScopeConfig]) Delete ¶
func (t ScopeConfigHelper[ScopeConfig]) Delete(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
func (ScopeConfigHelper[ScopeConfig]) Get ¶
func (t ScopeConfigHelper[ScopeConfig]) Get(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
func (ScopeConfigHelper[ScopeConfig]) List ¶
func (t ScopeConfigHelper[ScopeConfig]) List(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
func (ScopeConfigHelper[ScopeConfig]) Update ¶
func (t ScopeConfigHelper[ScopeConfig]) Update(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error)
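To show how this helper is typically wired up, the sketch below constructs a ScopeConfigHelper for a hypothetical scope-config model and exposes its methods as API handlers. The MyScopeConfig model, the plugin name, the route layout, and the assumption that plugin.ApiResourceHandler matches these method signatures are all illustrative, not prescribed by this page.

package example

import (
    "github.com/apache/incubator-devlake/core/context"
    "github.com/apache/incubator-devlake/core/plugin"
    "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
    "github.com/go-playground/validator/v10"
)

// MyScopeConfig is a hypothetical scope config; dal.Tabler only requires TableName.
type MyScopeConfig struct {
    ID   uint64 `gorm:"primaryKey" json:"id"`
    Name string `json:"name"`
}

func (MyScopeConfig) TableName() string { return "_tool_myplugin_scope_configs" }

// scopeConfigHandlers maps HTTP methods to the helper's CRUD endpoints;
// registering them under concrete routes is left to the plugin.
func scopeConfigHandlers(basicRes context.BasicRes, vld *validator.Validate) map[string]plugin.ApiResourceHandler {
    helper := api.NewScopeConfigHelper[MyScopeConfig](basicRes, vld, "myplugin")
    return map[string]plugin.ApiResourceHandler{
        "POST":   helper.Create,
        "GET":    helper.Get,
        "PATCH":  helper.Update,
        "DELETE": helper.Delete,
    }
}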
type ScopeDatabaseHelper ¶
type ScopeDatabaseHelper[Conn any, Scope plugin.ToolLayerScope, Tr any] interface {
    VerifyConnection(connectionId uint64) errors.Error
    SaveScope(scopes []*Scope) errors.Error
    UpdateScope(scope *Scope) errors.Error
    GetScope(connectionId uint64, scopeId string) (*Scope, errors.Error)
    ListScopes(input *plugin.ApiResourceInput, connectionId uint64) ([]*Scope, errors.Error)
    DeleteScope(scope *Scope) errors.Error
    GetScopeConfig(ruleId uint64) (*Tr, errors.Error)
    ListScopeConfigs(ruleIds []uint64) ([]*Tr, errors.Error)
    GetScopeAndConfig(connectionId uint64, scopeId string) (*Scope, *Tr, errors.Error)
}
type ScopeDatabaseHelperImpl ¶
type ScopeDatabaseHelperImpl[Conn any, Scope plugin.ToolLayerScope, Tr any] struct {
    ScopeDatabaseHelper[Conn, Scope, Tr]
    // contains filtered or unexported fields
}
func NewScopeDatabaseHelperImpl ¶
func NewScopeDatabaseHelperImpl[Conn any, Scope plugin.ToolLayerScope, Tr any](
    basicRes context.BasicRes,
    connHelper *ConnectionApiHelper,
    params *ReflectionParameters,
) *ScopeDatabaseHelperImpl[Conn, Scope, Tr]
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) DeleteScope ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) DeleteScope(scope *Scope) errors.Error
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScope ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScope(connectionId uint64, scopeId string) (*Scope, errors.Error)
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScopeAndConfig ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScopeAndConfig(connectionId uint64, scopeId string) (*Scope, *Tr, errors.Error)
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScopeConfig ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) GetScopeConfig(ruleId uint64) (*Tr, errors.Error)
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) ListScopeConfigs ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) ListScopeConfigs(ruleIds []uint64) ([]*Tr, errors.Error)
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) ListScopes ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) ListScopes(input *plugin.ApiResourceInput, connectionId uint64) ([]*Scope, errors.Error)
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) SaveScope ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) SaveScope(scopes []*Scope) errors.Error
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) UpdateScope ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) UpdateScope(scope *Scope) errors.Error
func (*ScopeDatabaseHelperImpl[Conn, Scope, Tr]) VerifyConnection ¶
func (s *ScopeDatabaseHelperImpl[Conn, Scope, Tr]) VerifyConnection(connectionId uint64) errors.Error
type ScopeHelperOptions ¶
type ScopeRefDoc ¶
type ScopeRefDoc = serviceHelper.BlueprintProjectPairs
Alias for swagger purposes
type ScopeRes ¶
type ScopeRes[Scope any, ScopeConfig any] struct {
    Scope *Scope `mapstructure:",squash"` // ideally we need this field to be embedded in the struct
    ScopeResDoc[ScopeConfig] `mapstructure:",squash"` // however, only this type of embedding is supported as of golang 1.20
}
func (*ScopeRes[T, Y]) MarshalJSON ¶
MarshalJSON is implemented to flatten all fields into a single JSON object
type ScopeResDoc ¶
type ScopeResDoc[ScopeConfig any] struct {
    ScopeConfig *ScopeConfig        `mapstructure:"scopeConfig,omitempty" json:"scopeConfig"`
    Blueprints  []*models.Blueprint `mapstructure:"blueprints,omitempty" json:"blueprints"`
}
As of golang v1.20, embedding generic fields is not supported; the struct is therefore divided into two parts so the swagger doc works: https://stackoverflow.com/questions/66118867/go-generics-is-it-possible-to-embed-generic-structs
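The limitation can be shown in miniature: a type parameter itself cannot be embedded, so the parameterized field stays named, while an instantiation of a named generic struct can still be embedded. The Doc and Res names below are purely illustrative.

package example

// Doc groups the generic fields so they can be embedded by name.
type Doc[C any] struct {
    Config *C `json:"config"`
}

// Res cannot embed its type parameter T directly (that does not compile),
// so T is kept as a named field, while Doc[C] is embedded as a whole.
type Res[T any, C any] struct {
    Data *T `json:"data"`
    Doc[C]
}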
type SearchRemoteScopesOutput ¶
type SearchRemoteScopesOutput struct {
    Children []RemoteScopesChild `json:"children"`
    Page     int                 `json:"page"`
    PageSize int                 `json:"pageSize"`
}
type TaskOptions ¶
type TaskOptions interface {
GetParams() any
}
type WorkerScheduler ¶
type WorkerScheduler struct {
// contains filtered or unexported fields
}
WorkerScheduler runs asynchronous tasks in parallel with throttling support
func NewWorkerScheduler ¶
func NewWorkerScheduler(
    ctx context.Context,
    numOfWorkers int,
    tickInterval time.Duration,
    logger log.Logger,
) (*WorkerScheduler, errors.Error)
NewWorkerScheduler creates a WorkerScheduler
func (*WorkerScheduler) GetTickInterval ¶
func (s *WorkerScheduler) GetTickInterval() time.Duration
GetTickInterval returns the current tick interval of the WorkerScheduler
func (*WorkerScheduler) HasError ¶
func (s *WorkerScheduler) HasError() bool
HasError returns whether any error has occurred
func (*WorkerScheduler) NextTick ¶
func (s *WorkerScheduler) NextTick(task func() errors.Error)
NextTick enqueues a task in a non-blocking manner; you should only call this method from within a task submitted via SubmitBlocking. IMPORTANT: do NOT call this method with a huge number of tasks; it is likely to eat up all available memory
func (*WorkerScheduler) Reset ¶
func (s *WorkerScheduler) Reset(interval time.Duration)
Reset stops a WorkerScheduler and resets its period to the specified duration.
func (*WorkerScheduler) SubmitBlocking ¶
func (s *WorkerScheduler) SubmitBlocking(task func() errors.Error)
SubmitBlocking enqueues an async task to ants; the task will be executed in the future when the timing is right. It doesn't return an error because, with blocking semantics, there would be none; a returned error would do nothing but cause confusion, as people often assume it was returned by the task itself. Since the task is asynchronous, call frames are not available in production mode; export the environment variable ASYNC_CF=true to enable call-frame capturing when debugging. IMPORTANT: do NOT call SubmitBlocking inside an async task; it is likely to cause a deadlock. Call SubmitNonBlocking instead when the number of tasks is relatively small.
func (*WorkerScheduler) WaitAsync ¶
func (s *WorkerScheduler) WaitAsync() errors.Error
WaitAsync blocks the current goroutine until all workers have returned
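A minimal end-to-end sketch of the scheduler, assuming the import paths below match the repository layout: create it, submit a handful of throttled tasks, then wait for completion.

package example

import (
    gocontext "context"
    "fmt"
    "time"

    "github.com/apache/incubator-devlake/core/errors"
    "github.com/apache/incubator-devlake/core/log"
    "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
)

// runThrottled executes ten tasks with at most 5 workers, one tick per second,
// and blocks until they have all returned.
func runThrottled(ctx gocontext.Context, logger log.Logger) errors.Error {
    scheduler, err := api.NewWorkerScheduler(ctx, 5, time.Second, logger)
    if err != nil {
        return err
    }
    for i := 0; i < 10; i++ {
        i := i // capture the loop variable for the closure
        scheduler.SubmitBlocking(func() errors.Error {
            fmt.Println("running task", i)
            return nil
        })
    }
    // WaitAsync blocks the current goroutine until all submitted tasks return.
    return scheduler.WaitAsync()
}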
Source Files ¶
- api_async_client.go
- api_client.go
- api_collector.go
- api_collector_func.go
- api_collector_with_state.go
- api_extractor.go
- api_ratelimit_calc.go
- api_rawdata.go
- batch_save.go
- batch_save_divider.go
- config_util.go
- connection.go
- connection_auths.go
- connection_helper.go
- cst_time.go
- data_convertor.go
- data_enricher.go
- enrich_with_regex.go
- graphql_async_client.go
- graphql_collector.go
- iso8601time.go
- issue_repo_commit_convertor.go
- iterator.go
- list.go
- mapstructure.go
- misc_helpers.go
- pagenation.go
- pipeline_plan.go
- queue.go
- reflection_helper.go
- remote_api_helper.go
- scope_config_helper.go
- scope_db_helper.go
- scope_generic_helper.go
- scope_helper.go
- worker_scheduler.go