Added a loading screen while large XML lexicon files are fetched and parsed
parent
e7b2556f3e
commit
3f09cdca6e
|
|
@ -21,6 +21,7 @@ yarn-debug.log*
|
|||
yarn-error.log*
|
||||
|
||||
public/png
|
||||
*.xml
|
||||
|
||||
# Created by https://www.gitignore.io/api/python
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
import * as React from 'react';
|
||||
import { bindActionCreators } from 'redux';
|
||||
import { Provider, connect } from 'react-redux';
|
||||
import { LexEditor } from './LexComponents';
|
||||
// import { LexEditor } from './LexComponents';
|
||||
import { LexSetup } from './LexSetup';
|
||||
import * as actionCreators from './actionCreators';
|
||||
import { walleStore } from './WallEStore';
|
||||
import { Header, Icon, Segment } from 'semantic-ui-react';
|
||||
|
|
@ -26,7 +27,7 @@ export class Main extends React.Component<any, any> {
|
|||
</Header.Content>
|
||||
</Header>
|
||||
</Segment>
|
||||
<LexEditor {...this.props} fileName="/new_es_orig.xml" />
|
||||
<LexSetup {...this.props} fileName="/new_es_orig.xml"/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,85 +1,18 @@
|
|||
import * as React from 'react';
|
||||
// import { connect } from 'react-redux';
|
||||
import * as _ from 'lodash';
|
||||
import { LexEdit } from './LexEdit';
|
||||
import {
|
||||
Input,
|
||||
Dropdown,
|
||||
} from 'semantic-ui-react';
|
||||
import * as XML from 'xml2js';
|
||||
const { Flex } = require('reflexbox');
|
||||
|
||||
// container component
|
||||
export class LexEditor extends React.Component<any, any> {
|
||||
lexData: any;
|
||||
allEntries: any;
|
||||
selectFields: any;
|
||||
fieldMetaMap = {
|
||||
label: { lens: 'label[0]', type: 'text' },
|
||||
unl: { lens: 'unl[0]', type: 'text' },
|
||||
synset: { lens: 'lexprops[0].wnsynset[0]', type: 'text' },
|
||||
guid: { lens: 'guid[0]', type: 'text' },
|
||||
pos: { lens: 'pos[0]', type: 'select' },
|
||||
image: { lens: 'image[0]', type: 'preview' },
|
||||
relations: { lens: 'relations[0]', type: 'text' },
|
||||
frame: { lens: 'syntacticprops[0].property[0]._', type: 'select' },
|
||||
morphclass: {
|
||||
lens: 'lexprops[0].morphology[0].morph[0]._',
|
||||
type: 'select'
|
||||
},
|
||||
stats: { lens: 'stats[0].property[0]._', type: 'text' },
|
||||
lang: { lens: '$.id', type: 'select', options: ['en', 'es'] },
|
||||
};
|
||||
|
||||
public componentDidMount() {
|
||||
fetch(this.props.fileName)
|
||||
.then((response) => response.text())
|
||||
.then((xmlString) => {
|
||||
XML.parseString(xmlString, (err, lexData) => {
|
||||
this.lexData = lexData;
|
||||
this.allEntries = _.chain(lexData)
|
||||
.get<any>('document.lexicon[0].item')
|
||||
.flatMap((o: any) => _.chain(o)
|
||||
.get<any>('entry')
|
||||
.map((p: any) => _.chain(p)
|
||||
.get<any>('lang[0]')
|
||||
.set('guid[0]', o.$.guid)
|
||||
.value())
|
||||
.value()
|
||||
)
|
||||
.value();
|
||||
let langReducer = ((result: any, q: any) => {
|
||||
let lang = _.get<any>(q, this.fieldMetaMap.lang.lens, 'en');
|
||||
(result[lang] || (result[lang] = [])).push(q);
|
||||
return result;
|
||||
});
|
||||
let langEntries = _.reduce(this.allEntries, langReducer, {});
|
||||
let langs = _.keys(langEntries);
|
||||
this.selectFields = _.fromPairs(langs.map((lang) => {
|
||||
let langOpts = _.fromPairs(_.keys(this.fieldMetaMap).filter((s) => {
|
||||
return this.fieldMetaMap[s].type === 'select';
|
||||
}).map((s) => {
|
||||
let lens = this.fieldMetaMap[s].lens;
|
||||
let entries = _.get<any>(langEntries, lang, 'en');
|
||||
let selectOptions = _.uniq(entries.map((q: any) => {
|
||||
return _.get<any>(q, lens, '');
|
||||
}));
|
||||
return [s, selectOptions];
|
||||
}));
|
||||
return [lang, langOpts];
|
||||
}));
|
||||
this.forceUpdate();
|
||||
});
|
||||
})
|
||||
.catch((e) => {
|
||||
console.log('errored :', e);
|
||||
});
|
||||
}
|
||||
|
||||
public render() {
|
||||
let searchLens = this.fieldMetaMap[this.props.searchState.searchType].lens;
|
||||
let searchLens = this.props.fieldMetaMap[this.props.searchState.searchType].lens;
|
||||
let searchText = this.props.searchState.searchValue;
|
||||
let matchedEntries = _.chain(this.allEntries)
|
||||
let matchedEntries = _.chain(this.props.allEntries)
|
||||
.filter((q: any) => _.get<any>(q, searchLens, '') === searchText)
|
||||
.take(10)
|
||||
.value();
|
||||
|
|
@ -87,15 +20,12 @@ export class LexEditor extends React.Component<any, any> {
|
|||
<div>
|
||||
<LexSearch
|
||||
{...this.props}
|
||||
{...{ fieldMetaMap: this.fieldMetaMap }}
|
||||
// handleOnSearch={(e: any) => this.handleOnSearch(e)}
|
||||
// searchValue={searchText}
|
||||
// searchType={this.props.searchProp.searchType}
|
||||
{...{ fieldMetaMap: this.props.fieldMetaMap }}
|
||||
/>
|
||||
<LexMatches
|
||||
{...{ fieldMetaMap: this.fieldMetaMap }}
|
||||
{...{ fieldMetaMap: this.props.fieldMetaMap }}
|
||||
matchedEntries={matchedEntries}
|
||||
selectionMeta={this.selectFields}
|
||||
selectionMeta={this.props.selectFields}
|
||||
searchText={searchText}
|
||||
searchLens={searchLens}
|
||||
/>
|
||||
|
|
@ -104,8 +34,6 @@ export class LexEditor extends React.Component<any, any> {
|
|||
}
|
||||
}
|
||||
|
||||
// export const ReduxLexEditor = connect()(LexEditor);
|
||||
|
||||
class LexSearch extends React.Component<any, any> {
|
||||
public render() {
|
||||
let dropOptions = _.keys(this.props.fieldMetaMap).map((k, i, c) => {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,81 @@
|
|||
import * as React from 'react';
|
||||
import { Dimmer, Loader } from 'semantic-ui-react';
|
||||
import * as XML from 'xml2js';
|
||||
import * as _ from 'lodash';
|
||||
import { LexEditor } from './LexComponents';
|
||||
|
||||
/**
 * Metadata for every lexicon field the editor knows about.
 *
 * Each entry pairs a lodash-style `lens` path (used with `_.get` /
 * `_.set` against a parsed xml2js entry) with the widget `type` the UI
 * should render ('text', 'select', or 'preview').
 *
 * NOTE: key order is significant — `_.keys(fieldMetaMap)` determines the
 * order in which select-field options are gathered.
 */
const fieldMetaMap = {
  // Plain text fields.
  label: { lens: 'label[0]', type: 'text' },
  unl: { lens: 'unl[0]', type: 'text' },
  synset: { lens: 'lexprops[0].wnsynset[0]', type: 'text' },
  guid: { lens: 'guid[0]', type: 'text' },
  // Dropdown-backed fields whose options are derived from the data.
  pos: { lens: 'pos[0]', type: 'select' },
  // Image preview field.
  image: { lens: 'image[0]', type: 'preview' },
  relations: { lens: 'relations[0]', type: 'text' },
  frame: { lens: 'syntacticprops[0].property[0]._', type: 'select' },
  morphclass: { lens: 'lexprops[0].morphology[0].morph[0]._', type: 'select' },
  stats: { lens: 'stats[0].property[0]._', type: 'text' },
  // Language selector; options are fixed rather than derived.
  lang: { lens: '$.id', type: 'select', options: ['en', 'es'] },
};
|
||||
|
||||
/**
 * Loader component for the lexicon editor.
 *
 * On mount it fetches the XML file named by `props.fileName`, parses it
 * with xml2js, flattens the entries, precomputes per-language select
 * options, and stores everything in component state. Until that state
 * exists, a semantic-ui Dimmer/Loader is shown; afterwards the data is
 * handed to <LexEditor /> via props.
 */
export class LexSetup extends React.Component<any, any> {
  public componentDidMount() {
    fetch(this.props.fileName)
      .then((response) => response.text())
      .then((xmlString) => {
        XML.parseString(xmlString, (err, lexData) => {
          // FIX: the original ignored `err` and proceeded with undefined
          // data; report the parse failure and bail out instead.
          if (err) {
            console.log('errored :', err);
            return;
          }
          // Flatten document.lexicon[0].item -> entry -> lang[0], tagging
          // each flattened entry with its parent item's guid so the guid
          // survives the flattening.
          let allEntries = _.chain(lexData)
            .get<any>('document.lexicon[0].item')
            .flatMap((o: any) => _.chain(o)
              .get<any>('entry')
              .map((p: any) => _.chain(p)
                .get<any>('lang[0]')
                .set('guid[0]', o.$.guid)
                .value())
              .value()
            )
            .value();

          // Bucket entries by language code; entries missing the lang
          // lens default to 'en'.
          let langReducer = ((result: any, q: any) => {
            let lang = _.get<any>(q, fieldMetaMap.lang.lens, 'en');
            (result[lang] || (result[lang] = [])).push(q);
            return result;
          });
          let langEntries = _.reduce(allEntries, langReducer, {});
          let langs = _.keys(langEntries);
          // For each language, collect the distinct values of every
          // 'select'-typed field so dropdowns can be populated.
          let selectFields = _.fromPairs(langs.map((lang) => {
            let langOpts = _.fromPairs(_.keys(fieldMetaMap).filter((s) => {
              return fieldMetaMap[s].type === 'select';
            }).map((s) => {
              let lens = fieldMetaMap[s].lens;
              // FIX: default to an empty array, not the string 'en' — the
              // fallback must be mappable like a list of entries. (The
              // default only fires if `lang` is somehow absent, since
              // `langs` comes from `_.keys(langEntries)`.)
              let entries = _.get<any>(langEntries, lang, []);
              let selectOptions = _.uniq(entries.map((q: any) => {
                return _.get<any>(q, lens, '');
              }));
              return [s, selectOptions];
            }));
            return [lang, langOpts];
          }));
          // Triggers a re-render; render() switches from the loader to
          // the editor once state exists.
          this.setState({
            lexData, allEntries, selectFields, fieldMetaMap
          });
        });
      })
      .catch((e) => {
        // Network / fetch errors: best-effort log, stay on the loader.
        console.log('errored :', e);
      });
  }

  render() {
    // `this.state` is null until componentDidMount's setState fires.
    return this.state ? (
      <LexEditor {...this.state} {...this.props} />
    ) : (
      <Dimmer active={true} inverted={true}>
        <Loader inverted={true}>Loading</Loader>
      </Dimmer>
    );
  }
}
|
||||
Loading…
Reference in New Issue