A 9-million-row CSV file needs to be split and then combined with the required data.
head -n 1116604 file.csv > file1.csv
This takes the first n rows of the file and writes them to a new file.
Splitting the file this way also works around a number of tooling errors.
Ex:
Error: Node Sass does not yet support your current environment: OS X 64-bit with Unsupported runtime (72)

Fix: delete package-lock.json (in /frontend/ or /main/, whichever is the correct directory for the project) and the node_modules/ folder (deleting it may require sudo), then reinstall dependencies.

try {
yield put({
type: 'SET_BUILDING_DOWNLOAD',
loading: true,
complete: false,
loadOption: 'buildingOwnersLoading',
completeOption: 'buildingOwnersComplete',
});
case types.RENAME_SOCIAL_SUCCESS:
return {
...state,
social: state.social.map(item => {
if (item.socialId === action.socialId) {
//item.name = action.name
return {
...item,
name: action.name

Condensed alternative (NOTE: this one-liner is buggy — the map callback has a braced body with no return statement, so every element maps to undefined, and it mutates item in place instead of copying it):

case types.RENAME_SOCIAL_SUCCESS:
return {
...state,
social: state.social.map(item => {if(item.socialId === action.socialId)item.name = action.name})
};

const objarray = [
{a: 1, b: 2, c:3},
{a: 2, b: 3, c:4},
{a: 3, b: 4, c:69},
]
objarray.filter(item => !Object.values(item).includes(69))
This keeps only the objects that do not contain the value 69 in any of their properties.

The following effects are imported from redux-saga/effects:
| effect | example | what it does |
|---|---|---|
| put | | sends data to the redux store |
| call | | sends the AJAX api request |
| all | | runs multiple effects in parallel, kind of like Promise.all |
| select | | |
| takeEvery | | |
| takeLatest | yield takeLatest("API_CALL_REQUEST", workerSaga) | listens for the action type and then executes workerSaga |