Fix various issues around OpenGraph representation of media (#14133)
- Fix audio attachments not being represented in OpenGraph tags
- Fix audio being represented as "1 image" in OpenGraph descriptions
- Fix video metadata being overwritten by paperclip-av-transcoder
- Fix embedded player not using Mastodon's UI
- Fix audio/video progress bars not moving smoothly
- Fix audio/video buffered bars not displaying correctly

Commit 662a49dc3f (parent e9ff61ca07)
12 changed files with 117 additions and 41 deletions
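The buffered-bar fix in both players comes down to reading the end of the last TimeRange exposed by HTMLMediaElement.buffered rather than always the first one, and rounding up to a whole percentage. A minimal standalone sketch of that calculation (the helper name and usage are illustrative, not part of the diff):

```js
// Sketch: how much of a media element is buffered, as a percentage.
// `buffered` can contain several discontinuous ranges, and the first
// range may end long before the playhead, so read the last one.
function bufferedPercent(media) {
  const lastTimeRange = media.buffered.length - 1;

  if (lastTimeRange < 0 || !media.duration) {
    return 0; // nothing buffered yet, or duration still unknown
  }

  return Math.ceil(media.buffered.end(lastTimeRange) / media.duration * 100);
}

// Usage (illustrative): refresh a buffer bar whenever more data arrives.
// audio.addEventListener('progress', () => {
//   bufferBar.style.width = `${bufferedPercent(audio)}%`;
// });
```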
@@ -15,11 +15,13 @@ module StatusesHelper
   end
 
   def media_summary(status)
-    attachments = { image: 0, video: 0 }
+    attachments = { image: 0, video: 0, audio: 0 }
 
     status.media_attachments.each do |media|
       if media.video?
         attachments[:video] += 1
+      elsif media.audio?
+        attachments[:audio] += 1
       else
         attachments[:image] += 1
       end
@@ -154,6 +154,7 @@ class Audio extends React.PureComponent {
     width: PropTypes.number,
     height: PropTypes.number,
     editable: PropTypes.bool,
+    fullscreen: PropTypes.bool,
     intl: PropTypes.object.isRequired,
     cacheWidth: PropTypes.func,
   };
@@ -180,7 +181,7 @@ class Audio extends React.PureComponent {
 
   _setDimensions () {
     const width = this.player.offsetWidth;
-    const height = width / (16/9);
+    const height = this.props.fullscreen ? this.player.offsetHeight : (width / (16/9));
 
     if (this.props.cacheWidth) {
       this.props.cacheWidth(width);
@@ -291,8 +292,10 @@ class Audio extends React.PureComponent {
   }
 
   handleProgress = () => {
-    if (this.audio.buffered.length > 0) {
-      this.setState({ buffer: this.audio.buffered.end(0) / this.audio.duration * 100 });
+    const lastTimeRange = this.audio.buffered.length - 1;
+
+    if (lastTimeRange > -1) {
+      this.setState({ buffer: Math.ceil(this.audio.buffered.end(lastTimeRange) / this.audio.duration * 100) });
     }
   }
 
@@ -349,18 +352,18 @@ class Audio extends React.PureComponent {
 
   handleMouseMove = throttle(e => {
     const { x } = getPointerPosition(this.seek, e);
-    const currentTime = Math.floor(this.audio.duration * x);
+    const currentTime = this.audio.duration * x;
 
     if (!isNaN(currentTime)) {
       this.setState({ currentTime }, () => {
         this.audio.currentTime = currentTime;
       });
     }
-  }, 60);
+  }, 15);
 
   handleTimeUpdate = () => {
     this.setState({
-      currentTime: Math.floor(this.audio.currentTime),
+      currentTime: this.audio.currentTime,
       duration: Math.floor(this.audio.duration),
     });
   }
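Two details in the hunk above account for the smoother scrubbing: the target position is no longer floored to whole seconds, and the drag handler's throttle interval drops from 60 ms to 15 ms, roughly once per frame at 60 fps. A small sketch of the same pattern, assuming lodash's throttle (which this code appears to use) and an illustrative element reference:

```js
import { throttle } from 'lodash';

// Sketch: map a pointer position on the seek bar to a playback position.
// Keeping fractional seconds (no Math.floor) lets the playhead land exactly
// where the user drags it; a 15 ms throttle updates roughly every frame.
const handleSeekDrag = throttle((audio, fractionAcrossBar) => {
  const currentTime = audio.duration * fractionAcrossBar;

  if (!isNaN(currentTime)) {
    audio.currentTime = currentTime; // fractional seconds are valid here
  }
}, 15);
```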
@@ -373,7 +376,7 @@ class Audio extends React.PureComponent {
         this.audio.volume = x;
       });
     }
-  }, 60);
+  }, 15);
 
   handleScroll = throttle(() => {
     if (!this.canvas || !this.audio) {
@@ -451,6 +454,7 @@ class Audio extends React.PureComponent {
 
   _renderCanvas () {
     requestAnimationFrame(() => {
+      this.handleTimeUpdate();
      this._clear();
      this._draw();
 
@@ -622,7 +626,7 @@ class Audio extends React.PureComponent {
     const progress = (currentTime / duration) * 100;
 
     return (
-      <div className={classNames('audio-player', { editable, 'with-light-background': darkText })} ref={this.setPlayerRef} style={{ width: '100%', height: this.state.height || this.props.height }} onMouseEnter={this.handleMouseEnter} onMouseLeave={this.handleMouseLeave}>
+      <div className={classNames('audio-player', { editable, 'with-light-background': darkText })} ref={this.setPlayerRef} style={{ width: '100%', height: this.props.fullscreen ? '100%' : (this.state.height || this.props.height) }} onMouseEnter={this.handleMouseEnter} onMouseLeave={this.handleMouseLeave}>
         <audio
           src={src}
           ref={this.setAudioRef}
@@ -630,7 +634,6 @@ class Audio extends React.PureComponent {
           onPlay={this.handlePlay}
           onPause={this.handlePause}
           onProgress={this.handleProgress}
-          onTimeUpdate={this.handleTimeUpdate}
           crossOrigin='anonymous'
         />
 
@@ -691,7 +694,7 @@ class Audio extends React.PureComponent {
         </div>
 
         <span className='video-player__time'>
-          <span className='video-player__time-current'>{formatTime(currentTime)}</span>
+          <span className='video-player__time-current'>{formatTime(Math.floor(currentTime))}</span>
           <span className='video-player__time-sep'>/</span>
           <span className='video-player__time-total'>{formatTime(this.state.duration || Math.floor(this.props.duration))}</span>
         </span>
@@ -177,15 +177,26 @@ class Video extends React.PureComponent {
 
   handlePlay = () => {
     this.setState({ paused: false });
+    this._updateTime();
   }
 
   handlePause = () => {
     this.setState({ paused: true });
   }
 
+  _updateTime () {
+    requestAnimationFrame(() => {
+      this.handleTimeUpdate();
+
+      if (!this.state.paused) {
+        this._updateTime();
+      }
+    });
+  }
+
   handleTimeUpdate = () => {
     this.setState({
-      currentTime: Math.floor(this.video.currentTime),
+      currentTime: this.video.currentTime,
       duration: Math.floor(this.video.duration),
     });
   }
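The smoother progress bars rest on the pattern added above: while playback is running, handleTimeUpdate is driven from a requestAnimationFrame loop instead of waiting for the browser's timeupdate event, which typically fires only a few times per second; the onTimeUpdate props are correspondingly removed from the <audio>/<video> elements in later hunks. A standalone sketch of that loop, with hypothetical helper names (not part of the diff):

```js
// Sketch: poll a media element's playback position once per rendered frame.
// requestAnimationFrame runs at the display's refresh rate, so a progress bar
// driven this way advances smoothly instead of in visible steps.
function trackProgress(media, onTick) {
  const step = () => {
    onTick(media.currentTime, media.duration); // fractional seconds, not floored

    if (!media.paused) {
      requestAnimationFrame(step); // keep looping only while playing
    }
  };

  requestAnimationFrame(step);
}

// Usage (hypothetical): restart the loop whenever playback starts.
// video.addEventListener('play', () => trackProgress(video, updateProgressBar));
```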
@@ -217,7 +228,7 @@ class Video extends React.PureComponent {
         this.video.volume = x;
       });
     }
-  }, 60);
+  }, 15);
 
   handleMouseDown = e => {
     document.addEventListener('mousemove', this.handleMouseMove, true);
@@ -245,13 +256,14 @@ class Video extends React.PureComponent {
 
   handleMouseMove = throttle(e => {
     const { x } = getPointerPosition(this.seek, e);
-    const currentTime = Math.floor(this.video.duration * x);
+    const currentTime = this.video.duration * x;
 
     if (!isNaN(currentTime)) {
+      this.setState({ currentTime }, () => {
         this.video.currentTime = currentTime;
-      this.setState({ currentTime });
+      });
     }
-  }, 60);
+  }, 15);
 
   togglePlay = () => {
     if (this.state.paused) {
@@ -387,8 +399,10 @@ class Video extends React.PureComponent {
   }
 
   handleProgress = () => {
-    if (this.video.buffered.length > 0) {
-      this.setState({ buffer: this.video.buffered.end(0) / this.video.duration * 100 });
+    const lastTimeRange = this.video.buffered.length - 1;
+
+    if (lastTimeRange > -1) {
+      this.setState({ buffer: Math.ceil(this.video.buffered.end(lastTimeRange) / this.video.duration * 100) });
     }
   }
 
@@ -484,7 +498,6 @@ class Video extends React.PureComponent {
           onClick={this.togglePlay}
           onPlay={this.handlePlay}
           onPause={this.handlePause}
-          onTimeUpdate={this.handleTimeUpdate}
           onLoadedData={this.handleLoadedData}
           onProgress={this.handleProgress}
           onVolumeChange={this.handleVolumeChange}
@@ -525,7 +538,7 @@ class Video extends React.PureComponent {
 
         {(detailed || fullscreen) && (
           <span className='video-player__time'>
-            <span className='video-player__time-current'>{formatTime(currentTime)}</span>
+            <span className='video-player__time-current'>{formatTime(Math.floor(currentTime))}</span>
             <span className='video-player__time-sep'>/</span>
             <span className='video-player__time-total'>{formatTime(duration)}</span>
           </span>
@@ -68,7 +68,32 @@ body {
   }
 
   &.player {
-    text-align: center;
+    padding: 0;
+    margin: 0;
+    position: absolute;
+    width: 100%;
+    height: 100%;
+    overflow: hidden;
+
+    & > div {
+      height: 100%;
+    }
+
+    .video-player video {
+      width: 100%;
+      height: 100%;
+      max-height: 100vh;
+    }
+
+    .media-gallery {
+      margin-top: 0;
+      height: 100% !important;
+      border-radius: 0;
+    }
+
+    .media-gallery__item {
+      border-radius: 0;
+    }
   }
 
   &.embed {
@@ -194,15 +194,17 @@ class MediaAttachment < ApplicationRecord
 
     x, y = (point.is_a?(Enumerable) ? point : point.split(',')).map(&:to_f)
 
-    meta = file.instance_read(:meta) || {}
+    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(:focus, :original, :small)
     meta['focus'] = { 'x' => x, 'y' => y }
 
     file.instance_write(:meta, meta)
   end
 
   def focus
-    x = file.meta['focus']['x']
-    y = file.meta['focus']['y']
+    x = file.meta&.dig('focus', 'x')
+    y = file.meta&.dig('focus', 'y')
+
+    return if x.nil? || y.nil?
 
     "#{x},#{y}"
   end
@@ -219,12 +221,11 @@ class MediaAttachment < ApplicationRecord
   before_create :prepare_description, unless: :local?
   before_create :set_shortcode
   before_create :set_processing
+  before_create :set_meta
 
   before_post_process :set_type_and_extension
   before_post_process :check_video_dimensions
 
-  before_save :set_meta
-
   class << self
     def supported_mime_types
       IMAGE_MIME_TYPES + VIDEO_MIME_TYPES + AUDIO_MIME_TYPES
@@ -306,15 +307,11 @@ class MediaAttachment < ApplicationRecord
   end
 
   def set_meta
-    meta = populate_meta
-
-    return if meta == {}
-
-    file.instance_write :meta, meta
+    file.instance_write :meta, populate_meta
   end
 
   def populate_meta
-    meta = file.instance_read(:meta) || {}
+    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(:focus, :original, :small)
 
     file.queued_for_write.each do |style, file|
       meta[style] = style == :small || image? ? image_geometry(file) : video_metadata(file)
@@ -7,7 +7,7 @@
 = opengraph 'og:title', yield(:page_title).strip
 = opengraph 'og:description', description
 = opengraph 'og:image', full_asset_url(account.avatar.url(:original))
-= opengraph 'og:image:width', '120'
-= opengraph 'og:image:height', '120'
+= opengraph 'og:image:width', '400'
+= opengraph 'og:image:height', '400'
 = opengraph 'twitter:card', 'summary'
 = opengraph 'profile:username', acct(account)[1..-1]
@@ -1,2 +1,16 @@
-%video{ poster: @media_attachment.file.url(:small), preload: 'auto', autoplay: 'autoplay', muted: 'muted', loop: 'loop', controls: 'controls', style: "width: #{@media_attachment.file.meta.dig('original', 'width')}px; height: #{@media_attachment.file.meta.dig('original', 'height')}px" }
-  %source{ src: @media_attachment.file.url(:original), type: @media_attachment.file_content_type }
+- content_for :header_tags do
+  = render_initial_state
+  = javascript_pack_tag 'public', integrity: true, crossorigin: 'anonymous'
+
+- if @media_attachment.video?
+  = react_component :video, src: @media_attachment.file.url(:original), preview: @media_attachment.file.url(:small), blurhash: @media_attachment.blurhash, width: 670, height: 380, editable: true, detailed: true, inline: true, alt: @media_attachment.description do
+    %video{ controls: 'controls' }
+      %source{ src: @media_attachment.file.url(:original) }
+- elsif @media_attachment.gifv?
+  = react_component :media_gallery, height: 380, standalone: true, autoplay: true, media: [ActiveModelSerializers::SerializableResource.new(@media_attachment, serializer: REST::MediaAttachmentSerializer).as_json] do
+    %video{ autoplay: 'autoplay', muted: 'muted', loop: 'loop' }
+      %source{ src: @media_attachment.file.url(:original) }
+- elsif @media_attachment.audio?
+  = react_component :audio, src: @media_attachment.file.url(:original), poster: full_asset_url(@media_attachment.account.avatar_static_url), width: 670, height: 380, fullscreen: true, alt: @media_attachment.description, duration: @media_attachment.file.meta.dig(:original, :duration) do
+    %audio{ controls: 'controls' }
+      %source{ src: @media_attachment.file.url(:original) }
@@ -33,7 +33,7 @@
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - elsif status.media_attachments.first.audio?
     - audio = status.media_attachments.first
-    = react_component :audio, src: audio.file.url(:original), height: 130, alt: audio.description, preload: true, duration: audio.file.meta.dig(:original, :duration) do
+    = react_component :audio, src: audio.file.url(:original), poster: full_asset_url(status.account.avatar_static_url), width: 670, height: 380, alt: audio.description, duration: audio.file.meta.dig(:original, :duration) do
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - else
     = react_component :media_gallery, height: 380, sensitive: status.sensitive?, standalone: true, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do
@@ -27,12 +27,25 @@
     = opengraph 'og:video:height', media.file.meta.dig('original', 'height')
     = opengraph 'twitter:player:width', media.file.meta.dig('original', 'width')
     = opengraph 'twitter:player:height', media.file.meta.dig('original', 'height')
+  - elsif media.audio?
+    - player_card = true
+    = opengraph 'og:image', full_asset_url(account.avatar.url(:original))
+    = opengraph 'og:image:width', '400'
+    = opengraph 'og:image:height','400'
+    = opengraph 'og:audio', full_asset_url(media.file.url(:original))
+    = opengraph 'og:audio:secure_url', full_asset_url(media.file.url(:original))
+    = opengraph 'og:audio:type', media.file_content_type
+    = opengraph 'twitter:player', medium_player_url(media)
+    = opengraph 'twitter:player:stream', full_asset_url(media.file.url(:original))
+    = opengraph 'twitter:player:stream:content_type', media.file_content_type
+    = opengraph 'twitter:player:width', '670'
+    = opengraph 'twitter:player:height', '380'
   - if player_card
     = opengraph 'twitter:card', 'player'
   - else
     = opengraph 'twitter:card', 'summary_large_image'
 - else
   = opengraph 'og:image', full_asset_url(account.avatar.url(:original))
-  = opengraph 'og:image:width', '120'
-  = opengraph 'og:image:height','120'
+  = opengraph 'og:image:width', '400'
+  = opengraph 'og:image:height','400'
   = opengraph 'twitter:card', 'summary'
@@ -37,7 +37,7 @@
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - elsif status.media_attachments.first.audio?
     - audio = status.media_attachments.first
-    = react_component :audio, src: audio.file.url(:original), height: 110, alt: audio.description, duration: audio.file.meta.dig(:original, :duration) do
+    = react_component :audio, src: audio.file.url(:original), poster: full_asset_url(status.account.avatar_static_url), width: 610, height: 343, alt: audio.description, duration: audio.file.meta.dig(:original, :duration) do
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - else
     = react_component :media_gallery, height: 343, sensitive: status.sensitive?, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do
@@ -25,8 +25,14 @@ class PostProcessMediaWorker
     media_attachment = MediaAttachment.find(media_attachment_id)
     media_attachment.processing = :in_progress
     media_attachment.save
+
+    # Because paperclip-av-transcoder overwrites this attribute
+    # we will save it here and restore it after reprocess is done
+    previous_meta = media_attachment.file_meta
+
     media_attachment.file.reprocess!(:original)
     media_attachment.processing = :complete
+    media_attachment.file_meta = previous_meta
     media_attachment.save
   rescue ActiveRecord::RecordNotFound
     true
@@ -1117,6 +1117,9 @@ en:
     spam_detected: This is an automated report. Spam has been detected.
   statuses:
     attached:
+      audio:
+        one: "%{count} audio"
+        other: "%{count} audio"
       description: 'Attached: %{attached}'
       image:
         one: "%{count} image"