diff --git a/native/chat/inline-multimedia.react.js b/native/chat/inline-multimedia.react.js
index bd6298066..45cfcc1c3 100644
--- a/native/chat/inline-multimedia.react.js
+++ b/native/chat/inline-multimedia.react.js
@@ -1,149 +1,149 @@
// @flow
import Icon from '@expo/vector-icons/Feather.js';
import IonIcon from '@expo/vector-icons/Ionicons.js';
import * as React from 'react';
import { View, StyleSheet, Text } from 'react-native';
import * as Progress from 'react-native-progress';
import tinycolor from 'tinycolor2';
import { isLocalUploadID } from 'lib/media/media-utils.js';
import type { MediaInfo } from 'lib/types/media-types.js';
import GestureTouchableOpacity from '../components/gesture-touchable-opacity.react.js';
import type { PendingMultimediaUpload } from '../input/input-state.js';
import Multimedia from '../media/multimedia.react.js';
type Props = {
+mediaInfo: MediaInfo,
+onPress: () => void,
+postInProgress: boolean,
+pendingUpload: ?PendingMultimediaUpload,
+spinnerColor: string,
};
function InlineMultimedia(props: Props): React.Node {
const { mediaInfo, pendingUpload, postInProgress } = props;
let failed = isLocalUploadID(mediaInfo.id) && !postInProgress;
let progressPercent = 1;
let processingStep;
if (pendingUpload) {
({ progressPercent, failed, processingStep } = pendingUpload);
}
let progressIndicator;
if (failed) {
progressIndicator = (
  <View style={styles.centerContainer}>
    <Icon name="alert-circle" style={styles.uploadError} size={64} />
  </View>
);
} else if (progressPercent !== 1) {
const progressOverlay = (
  <View style={styles.progressOverlay}>
    <Text style={styles.progressPercentText}>
      {`${Math.floor(progressPercent * 100).toString()}%`}
    </Text>
    <Text style={styles.processingStepText}>
      {processingStep ? processingStep : 'pending'}
    </Text>
  </View>
);
const primaryColor = tinycolor(props.spinnerColor);
const secondaryColor = primaryColor.isDark()
? primaryColor.lighten(20).toString()
: primaryColor.darken(20).toString();
const progressSpinnerProps = {
size: 120,
indeterminate: progressPercent === 0,
progress: progressPercent,
fill: secondaryColor,
unfilledColor: secondaryColor,
color: props.spinnerColor,
thickness: 10,
borderWidth: 0,
};
let progressSpinner;
if (processingStep === 'transcoding') {
progressSpinner = <Progress.CircleSnail {...progressSpinnerProps} />;
} else {
progressSpinner = <Progress.Circle {...progressSpinnerProps} />;
}
progressIndicator = (
  <View style={styles.centerContainer}>
    {progressSpinner}
    {progressOverlay}
  </View>
);
}
let playButton;
- if (mediaInfo.type === 'video') {
+ if (mediaInfo.type === 'video' || mediaInfo.type === 'encrypted_video') {
playButton = (
  <View style={styles.centerContainer}>
    <IonIcon name="play-circle" size={80} style={styles.playButton} />
  </View>
);
}
return (
  <GestureTouchableOpacity onPress={props.onPress} style={styles.expand}>
    <Multimedia mediaInfo={mediaInfo} spinnerColor={props.spinnerColor} />
    {progressIndicator ? progressIndicator : playButton}
  </GestureTouchableOpacity>
);
}
const styles = StyleSheet.create({
centerContainer: {
alignItems: 'center',
bottom: 0,
justifyContent: 'center',
left: 0,
position: 'absolute',
right: 0,
top: 0,
},
expand: {
flex: 1,
},
playButton: {
color: 'white',
opacity: 0.9,
textShadowColor: '#000',
textShadowOffset: { width: 0, height: 1 },
textShadowRadius: 1,
},
processingStepText: {
color: 'white',
fontSize: 12,
textShadowColor: '#000',
textShadowRadius: 1,
},
progressOverlay: {
alignItems: 'center',
justifyContent: 'center',
position: 'absolute',
},
progressPercentText: {
color: 'white',
fontSize: 24,
fontWeight: 'bold',
textShadowColor: '#000',
textShadowRadius: 1,
},
uploadError: {
color: 'white',
textShadowColor: '#000',
textShadowOffset: { width: 0, height: 1 },
textShadowRadius: 1,
},
});
export default InlineMultimedia;
diff --git a/native/chat/multimedia-message.react.js b/native/chat/multimedia-message.react.js
index df5bb17d2..02b7f2f8e 100644
--- a/native/chat/multimedia-message.react.js
+++ b/native/chat/multimedia-message.react.js
@@ -1,251 +1,251 @@
// @flow
import type {
LeafRoute,
NavigationProp,
ParamListBase,
} from '@react-navigation/native';
import { useNavigation, useRoute } from '@react-navigation/native';
import * as React from 'react';
import { StyleSheet, View } from 'react-native';
import { messageKey } from 'lib/shared/message-utils.js';
import { useCanCreateSidebarFromMessage } from 'lib/shared/thread-utils.js';
import type { MediaInfo } from 'lib/types/media-types.js';
import ComposedMessage from './composed-message.react.js';
import { InnerMultimediaMessage } from './inner-multimedia-message.react.js';
import {
getMediaKey,
multimediaMessageSendFailed,
} from './multimedia-message-utils.js';
import { getMessageTooltipKey } from './utils.js';
import { ChatContext, type ChatContextType } from '../chat/chat-context.js';
import { OverlayContext } from '../navigation/overlay-context.js';
import type { OverlayContextType } from '../navigation/overlay-context.js';
import {
ImageModalRouteName,
MultimediaMessageTooltipModalRouteName,
VideoPlaybackModalRouteName,
} from '../navigation/route-names.js';
import { fixedTooltipHeight } from '../tooltip/tooltip.react.js';
import type { ChatMultimediaMessageInfoItem } from '../types/chat-types.js';
import type {
VerticalBounds,
LayoutCoordinates,
} from '../types/layout-types.js';
type BaseProps = {
...React.ElementConfig<typeof View>,
+item: ChatMultimediaMessageInfoItem,
+focused: boolean,
+toggleFocus: (messageKey: string) => void,
+verticalBounds: ?VerticalBounds,
};
type Props = {
...BaseProps,
+navigation: NavigationProp<ParamListBase>,
+route: LeafRoute<>,
+overlayContext: ?OverlayContextType,
+chatContext: ?ChatContextType,
+canCreateSidebarFromMessage: boolean,
};
type State = {
+clickable: boolean,
};
class MultimediaMessage extends React.PureComponent<Props, State> {
state: State = {
clickable: true,
};
view: ?React.ElementRef<typeof View>;
setClickable = (clickable: boolean) => {
this.setState({ clickable });
};
onPressMultimedia = (
mediaInfo: MediaInfo,
initialCoordinates: LayoutCoordinates,
) => {
const { navigation, item, route, verticalBounds } = this.props;
navigation.navigate<'VideoPlaybackModal' | 'ImageModal'>({
name:
- mediaInfo.type === 'video'
+ mediaInfo.type === 'video' || mediaInfo.type === 'encrypted_video'
? VideoPlaybackModalRouteName
: ImageModalRouteName,
key: getMediaKey(item, mediaInfo),
params: {
presentedFrom: route.key,
mediaInfo,
item,
initialCoordinates,
verticalBounds,
},
});
};
visibleEntryIDs() {
const result = [];
if (
this.props.item.threadCreatedFromMessage ||
this.props.canCreateSidebarFromMessage
) {
result.push('sidebar');
}
if (!this.props.item.messageInfo.creator.isViewer) {
result.push('report');
}
return result;
}
onLayout = () => {};
viewRef = (view: ?React.ElementRef<typeof View>) => {
this.view = view;
};
onLongPress = () => {
const visibleEntryIDs = this.visibleEntryIDs();
if (visibleEntryIDs.length === 0) {
return;
}
const {
view,
props: { verticalBounds },
} = this;
if (!view || !verticalBounds) {
return;
}
if (!this.state.clickable) {
return;
}
this.setClickable(false);
const { item } = this.props;
if (!this.props.focused) {
this.props.toggleFocus(messageKey(item.messageInfo));
}
this.props.overlayContext?.setScrollBlockingModalStatus('open');
view.measure((x, y, width, height, pageX, pageY) => {
const coordinates = { x: pageX, y: pageY, width, height };
const multimediaTop = pageY;
const multimediaBottom = pageY + height;
const boundsTop = verticalBounds.y;
const boundsBottom = verticalBounds.y + verticalBounds.height;
const belowMargin = 20;
const belowSpace = fixedTooltipHeight + belowMargin;
const { isViewer } = item.messageInfo.creator;
const aboveMargin = isViewer ? 30 : 50;
const aboveSpace = fixedTooltipHeight + aboveMargin;
let margin = belowMargin;
if (
multimediaBottom + belowSpace > boundsBottom &&
multimediaTop - aboveSpace > boundsTop
) {
margin = aboveMargin;
}
const currentInputBarHeight =
this.props.chatContext?.chatInputBarHeights.get(item.threadInfo.id) ??
0;
this.props.navigation.navigate<'MultimediaMessageTooltipModal'>({
name: MultimediaMessageTooltipModalRouteName,
params: {
presentedFrom: this.props.route.key,
item,
initialCoordinates: coordinates,
verticalBounds,
tooltipLocation: 'fixed',
margin,
visibleEntryIDs,
chatInputBarHeight: currentInputBarHeight,
},
key: getMessageTooltipKey(item),
});
});
};
canNavigateToSidebar() {
return (
this.props.item.threadCreatedFromMessage ||
this.props.canCreateSidebarFromMessage
);
}
render() {
const {
item,
focused,
toggleFocus,
verticalBounds,
navigation,
route,
overlayContext,
chatContext,
canCreateSidebarFromMessage,
...viewProps
} = this.props;
return (
  <ComposedMessage
    item={item}
    sendFailed={multimediaMessageSendFailed(item)}
    focused={focused}
    {...viewProps}
  >
    <View style={styles.expand} onLayout={this.onLayout} ref={this.viewRef}>
      <InnerMultimediaMessage
        item={item}
        verticalBounds={verticalBounds}
        clickable={this.state.clickable}
        setClickable={this.setClickable}
        onPressMultimedia={this.onPressMultimedia}
        onLongPress={this.onLongPress}
      />
    </View>
  </ComposedMessage>
);
}
}
const styles = StyleSheet.create({
expand: {
flex: 1,
},
});
const ConnectedMultimediaMessage: React.ComponentType<BaseProps> =
  React.memo<BaseProps>(function ConnectedMultimediaMessage(props: BaseProps) {
const navigation = useNavigation();
const route = useRoute();
const overlayContext = React.useContext(OverlayContext);
const chatContext = React.useContext(ChatContext);
const canCreateSidebarFromMessage = useCanCreateSidebarFromMessage(
props.item.threadInfo,
props.item.messageInfo,
);
return (
  <MultimediaMessage
    {...props}
    navigation={navigation}
    route={route}
    overlayContext={overlayContext}
    chatContext={chatContext}
    canCreateSidebarFromMessage={canCreateSidebarFromMessage}
  />
);
});
export default ConnectedMultimediaMessage;
diff --git a/native/chat/settings/thread-settings-media-gallery.react.js b/native/chat/settings/thread-settings-media-gallery.react.js
index 2c12f2659..bbb89abf4 100644
--- a/native/chat/settings/thread-settings-media-gallery.react.js
+++ b/native/chat/settings/thread-settings-media-gallery.react.js
@@ -1,220 +1,226 @@
// @flow
import { useNavigation, useRoute } from '@react-navigation/native';
import * as React from 'react';
import { View, useWindowDimensions } from 'react-native';
import type { ViewStyleProp } from 'react-native/Libraries/StyleSheet/StyleSheet';
import { FlatList } from 'react-native-gesture-handler';
import { fetchThreadMedia } from 'lib/actions/thread-actions.js';
import type { MediaInfo, Media } from 'lib/types/media-types';
import { useServerCall } from 'lib/utils/action-utils.js';
import GestureTouchableOpacity from '../../components/gesture-touchable-opacity.react.js';
import Multimedia from '../../media/multimedia.react.js';
import {
ImageModalRouteName,
VideoPlaybackModalRouteName,
} from '../../navigation/route-names.js';
import { useStyles } from '../../themes/colors.js';
import type {
LayoutCoordinates,
VerticalBounds,
} from '../../types/layout-types.js';
const galleryItemGap = 8;
const numColumns = 3;
type ThreadSettingsMediaGalleryProps = {
+threadID: string,
+limit: number,
+verticalBounds: ?VerticalBounds,
+offset?: number,
+activeTab?: string,
};
function ThreadSettingsMediaGallery(
props: ThreadSettingsMediaGalleryProps,
): React.Node {
const styles = useStyles(unboundStyles);
const { width } = useWindowDimensions();
// Explanation of galleryItemWidth:
// The FlatList has a horizontal padding of 16px on each side,
// and so the width of the actual FlatList is `width - 32px`.
// With three columns, there will be two gaps in between the items,
// so the width of each item (with the gaps) will be
// (width - 32px - (numColumns-1) * galleryItemGap) / numColumns.
// E.g. 16px, media, galleryItemGap, media, galleryItemGap, media, 16px
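// For example, on a hypothetical 390px-wide window:
// (390 - 32 - (3 - 1) * 8) / 3 = 114px per item.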
const galleryItemWidth =
(width - 32 - (numColumns - 1) * galleryItemGap) / numColumns;
const { threadID, limit, verticalBounds, offset, activeTab } = props;
const [mediaInfos, setMediaInfos] = React.useState([]);
const callFetchThreadMedia = useServerCall(fetchThreadMedia);
React.useEffect(() => {
const fetchData = async () => {
const result = await callFetchThreadMedia({
threadID,
limit,
offset: 0,
});
setMediaInfos(result.media);
};
fetchData();
}, [callFetchThreadMedia, threadID, limit]);
const memoizedStyles = React.useMemo(() => {
return {
mediaContainer: {
marginTop: galleryItemGap,
width: galleryItemWidth,
...styles.mediaContainer,
},
mediaContainerWithMargin: {
marginTop: galleryItemGap,
marginLeft: galleryItemGap,
width: galleryItemWidth,
...styles.mediaContainer,
},
media: {
width: galleryItemWidth,
...styles.media,
},
};
}, [galleryItemWidth, styles.media, styles.mediaContainer]);
const filteredMediaInfos = React.useMemo(() => {
if (activeTab === 'ALL') {
return mediaInfos;
} else if (activeTab === 'IMAGES') {
- return mediaInfos.filter(mediaInfo => mediaInfo.type === 'photo');
+ return mediaInfos.filter(
+ mediaInfo =>
+ mediaInfo.type === 'photo' || mediaInfo.type === 'encrypted_photo',
+ );
} else if (activeTab === 'VIDEOS') {
- return mediaInfos.filter(mediaInfo => mediaInfo.type === 'video');
+ return mediaInfos.filter(
+ mediaInfo =>
+ mediaInfo.type === 'video' || mediaInfo.type === 'encrypted_video',
+ );
}
return mediaInfos;
}, [activeTab, mediaInfos]);
const renderItem = React.useCallback(
({ item, index }) => (
  <MediaGalleryItem
    item={item}
    index={index}
    memoizedStyles={memoizedStyles}
    threadID={threadID}
    verticalBounds={verticalBounds}
  />
),
[threadID, verticalBounds, memoizedStyles],
);
const onEndReached = React.useCallback(async () => {
// As the FlatList fetches more media, we set the offset to the current
// length of mediaInfos. This ensures the next page of media picks up
// exactly where the previous fetch left off.
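// For example, if 30 items have already been loaded, the next call is made
// with offset: 30 and returns media starting from the 31st item.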
const result = await callFetchThreadMedia({
threadID,
limit,
offset: mediaInfos.length,
});
setMediaInfos([...mediaInfos, ...result.media]);
}, [callFetchThreadMedia, mediaInfos, threadID, limit]);
return (
  <View style={styles.flatListContainer}>
    <FlatList
      data={filteredMediaInfos}
      numColumns={numColumns}
      renderItem={renderItem}
      onEndReached={onEndReached}
    />
  </View>
);
}
type MediaGalleryItemProps = {
+item: Media,
+index: number,
+memoizedStyles: {
+mediaContainer: ViewStyleProp,
+mediaContainerWithMargin: ViewStyleProp,
+media: ViewStyleProp,
},
+threadID: string,
+verticalBounds: ?VerticalBounds,
};
function MediaGalleryItem(props: MediaGalleryItemProps): React.Node {
const navigation = useNavigation();
const route = useRoute();
const ref = React.useRef(null);
const onLayout = React.useCallback(() => {}, []);
const { threadID, verticalBounds, memoizedStyles, item, index } = props;
const mediaInfo: MediaInfo = React.useMemo(
() => ({
...(item: Media),
index,
}),
[item, index],
);
const navigateToMedia = React.useCallback(() => {
ref.current?.measure((x, y, width, height, pageX, pageY) => {
const initialCoordinates: LayoutCoordinates = {
x: pageX,
y: pageY,
width,
height,
};
navigation.navigate<'VideoPlaybackModal' | 'ImageModal'>({
name:
- mediaInfo.type === 'video'
+ mediaInfo.type === 'video' || mediaInfo.type === 'encrypted_video'
? VideoPlaybackModalRouteName
: ImageModalRouteName,
key: `multimedia|${threadID}|${mediaInfo.id}`,
params: {
presentedFrom: route.key,
mediaInfo,
item,
initialCoordinates,
verticalBounds,
},
});
});
}, [navigation, route, threadID, mediaInfo, item, verticalBounds]);
const containerStyle =
index % numColumns === 0
? memoizedStyles.mediaContainer
: memoizedStyles.mediaContainerWithMargin;
return (
  <View style={containerStyle} ref={ref} onLayout={onLayout}>
    <GestureTouchableOpacity onPress={navigateToMedia}>
      <View style={memoizedStyles.media}>
        <Multimedia mediaInfo={mediaInfo} />
      </View>
    </GestureTouchableOpacity>
  </View>
);
}
const unboundStyles = {
flatListContainer: {
paddingHorizontal: 16,
},
mediaContainer: {
height: 180,
justifyContent: 'center',
alignItems: 'center',
},
media: {
height: 180,
},
};
export default ThreadSettingsMediaGallery;