Skip to content
Open
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .goreleaser.yml
Original file line number Diff line number Diff line change
Expand Up @@ -134,4 +134,4 @@ chocolateys:
api_key: '{{ .Env.CHOCOLATEY_API_KEY }}'
source_repo: "https://push.chocolatey.org/"
skip_publish: false
goamd64: v1
goamd64: v1
2 changes: 2 additions & 0 deletions .tool-versions
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
golangci-lint 1.63.4
golang 1.23.4
43 changes: 43 additions & 0 deletions api/dashboard/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -388,6 +388,49 @@ func (c *Client) CreateAPIKey(accessToken, appID string, acl []string, descripti
return key, nil
}

// GetCrawlerUser gets the crawler API user data for the current authenticated user.
// On a non-200 response it decodes the JSON:API error envelope and surfaces the
// first error, preferring its Detail text over its Title.
func (c *Client) GetCrawlerUser(accessToken string) (*DashboardCrawlerUserData, error) {
	req, err := http.NewRequest(http.MethodGet, c.APIURL+"/1/crawler/user", nil)
	if err != nil {
		return nil, err
	}
	c.setAPIHeaders(req, accessToken)

	resp, err := c.client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Happy path: decode the JSON:API payload and hand back its data member.
	if resp.StatusCode == http.StatusOK {
		var userResp DashboardCrawlerUserResponse
		if decodeErr := json.NewDecoder(resp.Body).Decode(&userResp); decodeErr != nil {
			return nil, fmt.Errorf("failed to parse crawler response: %w", decodeErr)
		}
		return &userResp.Data, nil
	}

	// Error path: the body should carry a JSON:API errors array.
	var errResp DashboardCrawlerErrorResponse
	if decodeErr := json.NewDecoder(resp.Body).Decode(&errResp); decodeErr != nil {
		return nil, fmt.Errorf("failed to parse crawler response: %w", decodeErr)
	}
	if len(errResp.Errors) == 0 {
		return nil, fmt.Errorf("failed to get crawler user data: unknown crawler error")
	}

	first := errResp.Errors[0]
	message := first.Title
	if first.Detail != nil && *first.Detail != "" {
		message = *first.Detail
	}
	return nil, fmt.Errorf("failed to get crawler user data: %s", message)
}

func (c *Client) setAPIHeaders(req *http.Request, accessToken string) {
req.Header.Set("Authorization", "Bearer "+accessToken)
req.Header.Set("Accept", "application/vnd.api+json")
Expand Down
103 changes: 103 additions & 0 deletions api/dashboard/client_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -197,3 +197,106 @@ func TestCreateApplication_Success(t *testing.T) {
assert.Equal(t, "NEW_APP", app.ID)
assert.Equal(t, "My App", app.Name)
}

// TestGetCrawlerUser_Success verifies the happy path: a 200 response is decoded
// and the data member is returned field-for-field.
func TestGetCrawlerUser_Success(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/1/crawler/user", func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, http.MethodGet, r.Method)
		assert.Equal(t, "Bearer test-token", r.Header.Get("Authorization"))

		payload := DashboardCrawlerUserResponse{
			Data: DashboardCrawlerUserData{
				ID:     "crawler-user-id",
				Email:  "crawler@example.com",
				Name:   "Crawler User",
				APIKey: "crawler-api-key",
			},
		}
		require.NoError(t, json.NewEncoder(w).Encode(payload))
	})

	ts, client := newTestClient(mux)
	defer ts.Close()

	user, err := client.GetCrawlerUser("test-token")
	require.NoError(t, err)

	assert.Equal(t, "crawler-user-id", user.ID)
	assert.Equal(t, "crawler@example.com", user.Email)
	assert.Equal(t, "Crawler User", user.Name)
	assert.Equal(t, "crawler-api-key", user.APIKey)
}

func TestGetCrawlerUser_HTTPError(t *testing.T) {
mux := http.NewServeMux()
mux.HandleFunc("/1/crawler/user", func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusForbidden)
detail := "forbidden"
require.NoError(t, json.NewEncoder(w).Encode(DashboardCrawlerErrorResponse{
Errors: []DashboardCrawlerError{{
Status: http.StatusText(http.StatusForbidden),
Title: "Forbidden",
Detail: &detail,
}},
}))
})

ts, client := newTestClient(mux)
defer ts.Close()

_, err := client.GetCrawlerUser("test-token")
require.Error(t, err)
assert.Contains(t, err.Error(), "failed to get crawler user data: forbidden")
assert.NotContains(t, err.Error(), "403")
}

func TestGetCrawlerUser_HTTPErrorWithoutDetail(t *testing.T) {
mux := http.NewServeMux()
mux.HandleFunc("/1/crawler/user", func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusForbidden)
require.NoError(t, json.NewEncoder(w).Encode(DashboardCrawlerErrorResponse{
Errors: []DashboardCrawlerError{{
Status: http.StatusText(http.StatusForbidden),
Title: "Forbidden",
Detail: nil,
}},
}))
})

ts, client := newTestClient(mux)
defer ts.Close()

_, err := client.GetCrawlerUser("test-token")
require.Error(t, err)
assert.Contains(t, err.Error(), "failed to get crawler user data: Forbidden")
assert.NotContains(t, err.Error(), "403")
}

// TestGetCrawlerUser_InvalidJSON verifies that a truncated 200 body surfaces a
// parse error rather than a partially-populated user.
func TestGetCrawlerUser_InvalidJSON(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/1/crawler/user", func(w http.ResponseWriter, r *http.Request) {
		_, writeErr := w.Write([]byte(`{"data":`))
		require.NoError(t, writeErr)
	})

	ts, client := newTestClient(mux)
	defer ts.Close()

	_, err := client.GetCrawlerUser("test-token")
	require.Error(t, err)
	assert.Contains(t, err.Error(), "failed to parse crawler response")
}

// TestGetCrawlerUser_HTTPErrorInvalidJSON verifies that a malformed error body
// on a non-200 response also surfaces as a parse error.
func TestGetCrawlerUser_HTTPErrorInvalidJSON(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/1/crawler/user", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusForbidden)
		_, writeErr := w.Write([]byte(`{"message":`))
		require.NoError(t, writeErr)
	})

	ts, client := newTestClient(mux)
	defer ts.Close()

	_, err := client.GetCrawlerUser("test-token")
	require.Error(t, err)
	assert.Contains(t, err.Error(), "failed to parse crawler response")
}
23 changes: 23 additions & 0 deletions api/dashboard/types.go
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,29 @@ type CreateAPIKeyResponse struct {
Data APIKeyResource `json:"data"`
}

// DashboardCrawlerUserData contains the user information from the crawler API
type DashboardCrawlerUserData struct {
	ID     string `json:"id"`
	Email  string `json:"email"`
	Name   string `json:"name"`
	APIKey string `json:"apiKey"`
}

// DashboardCrawlerUserResponse is the JSON:API response from GET /1/crawler/user
type DashboardCrawlerUserResponse struct {
	Data DashboardCrawlerUserData `json:"data"`
}

// DashboardCrawlerErrorResponse is the JSON:API error envelope returned by the
// crawler endpoint on non-200 responses.
type DashboardCrawlerErrorResponse struct {
	Errors []DashboardCrawlerError `json:"errors"`
}

// DashboardCrawlerError is a single JSON:API error object. Detail is a pointer
// because it may be absent; when present and non-empty it is preferred over
// Title as the user-facing message (see Client.GetCrawlerUser).
type DashboardCrawlerError struct {
	Status string  `json:"status"`
	Title  string  `json:"title"`
	Detail *string `json:"detail"`
}

// toApplication flattens a JSON:API resource into a simple Application.
func (r *ApplicationResource) toApplication() Application {
return Application{
Expand Down
2 changes: 1 addition & 1 deletion pkg/auth/oauth_flow.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ func OAuthClientID() string {

// RunOAuth runs the OAuth PKCE flow with a local callback server and returns
// a valid access token. A local HTTP server is started on a random port to
// receive the authorization code via redirect no copy-paste required.
// receive the authorization code via redirect - no copy-paste required.
//
// When openBrowser is true the authorize URL is opened automatically;
// otherwise only the URL is printed (useful when the browser can't be
Expand Down
2 changes: 2 additions & 0 deletions pkg/cmd/auth/auth.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"github.com/spf13/cobra"

"github.com/algolia/cli/pkg/auth"
"github.com/algolia/cli/pkg/cmd/auth/crawler"
"github.com/algolia/cli/pkg/cmd/auth/login"
"github.com/algolia/cli/pkg/cmd/auth/logout"
"github.com/algolia/cli/pkg/cmd/auth/signup"
Expand All @@ -22,6 +23,7 @@ func NewAuthCmd(f *cmdutil.Factory) *cobra.Command {
cmd.AddCommand(login.NewLoginCmd(f))
cmd.AddCommand(logout.NewLogoutCmd(f))
cmd.AddCommand(signup.NewSignupCmd(f))
cmd.AddCommand(crawler.NewCrawlerCmd(f))

return cmd
}
83 changes: 83 additions & 0 deletions pkg/cmd/auth/crawler/crawler.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
package crawler

import (
"fmt"

"github.com/algolia/cli/api/dashboard"
"github.com/algolia/cli/pkg/auth"
"github.com/algolia/cli/pkg/cmdutil"
"github.com/algolia/cli/pkg/config"
"github.com/algolia/cli/pkg/iostreams"
"github.com/algolia/cli/pkg/validators"
"github.com/spf13/cobra"
)

// CrawlerOptions holds the dependencies of the `auth crawler` command. The
// function-valued fields presumably exist as seams so tests can inject fakes —
// TODO confirm against the command's test file.
type CrawlerOptions struct {
	IO                 *iostreams.IOStreams
	config             config.IConfig
	OAuthClientID      func() string
	NewDashboardClient func(clientID string) *dashboard.Client
	GetValidToken      func(client *dashboard.Client) (string, error)
}

// NewCrawlerCmd returns the `auth crawler` cobra command, which loads the
// crawler API credentials for the authenticated user into the current profile.
func NewCrawlerCmd(f *cmdutil.Factory) *cobra.Command {
	opts := &CrawlerOptions{
		IO:            f.IOStreams,
		config:        f.Config,
		OAuthClientID: auth.OAuthClientID,
		NewDashboardClient: func(clientID string) *dashboard.Client {
			return dashboard.NewClient(clientID)
		},
		GetValidToken: auth.GetValidToken,
	}

	return &cobra.Command{
		Use:   "crawler",
		Short: "Load crawler auth details for the current profile",
		Args:  validators.NoArgs(),
		RunE: func(cmd *cobra.Command, args []string) error {
			return runCrawlerCmd(opts)
		},
	}
}

// runCrawlerCmd fetches the crawler API user credentials from the dashboard
// and persists them on the active profile (falling back to the default
// profile when none is selected).
func runCrawlerCmd(opts *CrawlerOptions) error {
	cs := opts.IO.ColorScheme()
	client := opts.NewDashboardClient(opts.OAuthClientID())

	token, err := opts.GetValidToken(client)
	if err != nil {
		return err
	}

	opts.IO.StartProgressIndicatorWithLabel("Fetching crawler information")
	userData, err := client.GetCrawlerUser(token)
	opts.IO.StopProgressIndicator()
	if err != nil {
		return err
	}

	// Resolve the target profile: prefer the selected profile, then the
	// configured default (which also becomes the active profile name).
	profileName := opts.config.Profile().Name
	if profileName == "" {
		if def := opts.config.Default(); def != nil {
			profileName = def.Name
			opts.config.Profile().Name = profileName
		}
	}
	if profileName == "" {
		return fmt.Errorf("no profile selected and no default profile configured")
	}

	if err = opts.config.SetCrawlerAuth(profileName, userData.ID, userData.APIKey); err != nil {
		return err
	}

	if opts.IO.IsStdoutTTY() {
		fmt.Fprintf(opts.IO.Out, "%s Crawler API auth credentials configured for profile: %s\n", cs.SuccessIcon(), profileName)
	}

	return nil
}
Loading
Loading