import React, { useState } from 'react';
import Avatar from '@material-ui/core/Avatar';
import Button from '@material-ui/core/Button';
import CssBaseline from '@material-ui/core/CssBaseline';
import TextField from '@material-ui/core/TextField';
import FormControlLabel from '@material-ui/core/FormControlLabel';
import Checkbox from '@material-ui/core/Checkbox';
import { BrowserRouter as Router, Switch, Redirect, Route, Link } from 'react-router-dom';
import Grid from '@material-ui/core/Grid';
import Box from '@material-ui/core/Box';
# coding=utf-8
from flask import Flask, request, jsonify
from binance.client import Client
from binance.enums import *
import json
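The Flask and python-binance imports above are the usual skeleton of a small order-relay webhook. A minimal sketch of that pattern follows; the /webhook route, the payload fields, and the environment-variable names are illustrative assumptions, not taken from the gist.

import os
import json

from flask import Flask, request, jsonify
from binance.client import Client
from binance.enums import SIDE_BUY, SIDE_SELL, ORDER_TYPE_MARKET

app = Flask(__name__)

# Assumed: API credentials come from environment variables, not hard-coded keys.
client = Client(os.environ.get("BINANCE_API_KEY"), os.environ.get("BINANCE_API_SECRET"))

@app.route("/webhook", methods=["POST"])  # hypothetical endpoint name
def webhook():
    # Assumed payload shape: {"side": "BUY"|"SELL", "symbol": "BTCUSDT", "quantity": 0.001}
    data = json.loads(request.data)
    order = client.create_order(
        symbol=data["symbol"],
        side=SIDE_BUY if data["side"] == "BUY" else SIDE_SELL,
        type=ORDER_TYPE_MARKET,
        quantity=data["quantity"],
    )
    return jsonify(order)

if __name__ == "__main__":
    app.run(port=5000)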
from pyspark.sql import SparkSession
from pyspark.sql import Row
import sys
import re
import pandas as pd
import numpy as np
from collections import OrderedDict
import ast
import requests
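The PySpark imports above typically back a driver script that builds a SparkSession, loads a dataset, and samples it into pandas for inspection. The sketch below shows that wiring under assumed names; the application name and input path are placeholders, not values from the gist.

# Reuses the imports above (SparkSession, Row, pandas).
spark = (
    SparkSession.builder
    .appName("autosuggest-session-analysis")  # hypothetical app name
    .getOrCreate()
)

# Hypothetical input path; the gist does not show the real data source.
df = spark.read.parquet("/data/autosuggest/sessions")

# Rows can also be built explicitly, e.g. Row(query="tv", clicks=3).
sample_pdf = df.limit(1000).toPandas()
print(sample_pdf.describe())

spark.stop()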
package com.flipkart.intentmeta.meta.autosuggest.session.searchSession;
import com.flipkart.intentmeta.meta.autosuggest.session.encoders.suggestionInstance.SuggestionInstance;
import com.flipkart.intentmeta.meta.autosuggest.sparkutils.datasetutils.ParquetWriter;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import static org.apache.spark.sql.functions.*;
public class SearchFunnelStitchingTask {
http://10.47.100.96//solr/autosuggestEnhancedCA/select?q.alt=*:*^0&qt=dismax&multiplyScore=1.0&bf=sum(mul(log(sum(field(impressions_sint),1)),0.83280152),mul(field(wilson_ctr_sfloat),1.041053885),mul(field(fourdays_log_impression_sfloat),0.74535868),mul(field(fourdays_wilson_ctr_sfloat),0.08989967),if(termfreq(generation_sources_smstring,%22GROCERY%22),mul(log(sum(field(generative_clicks_sint),1)),0.04040999),if(termfreq(generation_sources_smstring,%22BOOKS%22),mul(log(sum(field(generative_clicks_sint),1)),0.033278319),0)))&bf=div(-2.85975342,field(query_length_sint))&fl=display_query_sstring&fl=normalized_query_tokens_sstring&fl=decoration_context_sstring&fl=score&fl=generation_sources_smstring&sort=score+desc,ranking_score_float+desc,p_hits_float+desc,ctr_float+desc&fq=generation_sources_smstring:%22GROCERY%22+OR+(generation_sources_smstring:%22ORGANIC%22+AND+impressions_sint:%5B20+TO+*%5D+AND+ctr_float:%5B0.05+TO+*%5D+AND+wilson_ctr_sfloat:%5B0.1+TO+*%5D+AND+num_tokens_int:%5B*+TO+7%5D)+OR+generation_sourc
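The same autosuggest ranking query can be issued programmatically; the sketch below rebuilds a subset of the parameters from the URL above with requests. The bf boost functions and the fq filter are omitted here because the (truncated) URL above is the authoritative source for them, and the JSON response shape assumes the standard Solr select handler.

import requests

SOLR_SELECT = "http://10.47.100.96//solr/autosuggestEnhancedCA/select"

params = {
    "q.alt": "*:*^0",
    "qt": "dismax",
    "multiplyScore": "1.0",
    # "bf": [...],  # boost-function expressions exactly as encoded in the URL above
    "fl": [
        "display_query_sstring",
        "normalized_query_tokens_sstring",
        "decoration_context_sstring",
        "score",
        "generation_sources_smstring",
    ],
    "sort": "score desc,ranking_score_float desc,p_hits_float desc,ctr_float desc",
    "wt": "json",
}

resp = requests.get(SOLR_SELECT, params=params, timeout=10)
docs = resp.json()["response"]["docs"]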
{
  "type": "capacitySchedulerLeafQueueInfo",
  "capacity": 20,
  "usedCapacity": 1050.4178,
  "maxCapacity": 100,
  "absoluteCapacity": 0.9933912,
  "absoluteMaxCapacity": 90,
  "absoluteUsedCapacity": 10.434715,
  "numApplications": 7,
  "queueName": "search_autosuggest",
{
  "scheduler": {
    "schedulerInfo": {
      "type": "capacityScheduler",
      "capacity": 100.0,
      "usedCapacity": 95.2961,
      "maxCapacity": 100.0,
      "queueName": "root",
      "queues": {
        "queue": [